From 7e2813c5090cd10fb1dde709253db526ae7214d4 Mon Sep 17 00:00:00 2001 From: ambuj Date: Sat, 27 Jul 2024 23:34:41 +0530 Subject: [PATCH 001/545] add-cwe-data-in-multiple-importers Signed-off-by: ambuj --- vulnerabilities/importers/apache_httpd.py | 73 +++++++++++++++++++++++ vulnerabilities/importers/debian.py | 32 ++++++++++ vulnerabilities/importers/fireeye.py | 34 +++++++++++ vulnerabilities/tests/test_debian.py | 38 +++++++++++- vulnerabilities/tests/test_fireeye.py | 18 ++++++ 5 files changed, 194 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/importers/apache_httpd.py b/vulnerabilities/importers/apache_httpd.py index 10a99dd02..75e8c8aae 100644 --- a/vulnerabilities/importers/apache_httpd.py +++ b/vulnerabilities/importers/apache_httpd.py @@ -8,10 +8,12 @@ # import logging +import re import urllib import requests from bs4 import BeautifulSoup +from cwe2.database import Database from packageurl import PackageURL from univers.version_constraint import VersionConstraint from univers.version_range import ApacheVersionRange @@ -23,6 +25,7 @@ from vulnerabilities.importer import Reference from vulnerabilities.importer import VulnerabilitySeverity from vulnerabilities.severity_systems import APACHE_HTTPD +from vulnerabilities.utils import get_cwe_id from vulnerabilities.utils import get_item logger = logging.getLogger(__name__) @@ -102,11 +105,14 @@ def to_advisory(self, data): ) ) + weaknesses = get_weaknesses(data) + return AdvisoryData( aliases=[alias], summary=description or "", affected_packages=affected_packages, references=[reference], + weaknesses=weaknesses, url=reference.url, ) @@ -152,3 +158,70 @@ def fetch_links(url): continue links.append(urllib.parse.urljoin(url, link)) return links + + +def get_weaknesses(cve_data): + """ + Extract CWE IDs from CVE data. + + Args: + cve_data (dict): The CVE data in a dictionary format. + + Returns: + List[int]: A list of unique CWE IDs. + + >>> mock_cve_data = { + ... "containers": { + ... "cna": { + ... 
"providerMetadata": { + ... "orgId": "f0158376-9dc2-43b6-827c-5f631a4d8d09" + ... }, + ... "title": "mod_macro buffer over-read", + ... "problemTypes": [ + ... { + ... "descriptions": [ + ... { + ... "description": "CWE-125 Out-of-bounds Read", + ... "lang": "en", + ... "cweId": "CWE-125", + ... "type": "CWE" + ... } + ... ] + ... } + ... ] + ... } + ... } + ... } + >>> get_weaknesses(mock_cve_data) + [125] + """ + problem_types = cve_data.get("containers", {}).get("cna", {}).get("problemTypes", []) + descriptions = problem_types[0].get("descriptions", []) if len(problem_types) > 0 else [] + cwe_string = descriptions[0].get("cweId", "") if len(descriptions) > 0 else "" + cwe_pattern = r"CWE-\d+" + description = descriptions[0].get("description", "") if len(descriptions) > 0 else "" + matches = re.findall(cwe_pattern, description) + db = Database() + weaknesses = [] + cwe_string_from_description = "" + if matches: + cwe_string_from_description = matches[0] + if cwe_string or cwe_string_from_description: + if cwe_string: + cwe_id = get_cwe_id(cwe_string) + try: + db.get(cwe_id) + weaknesses.append(cwe_id) + except Exception: + logger.error("Invalid CWE id") + elif cwe_string_from_description: + cwe_id = get_cwe_id(cwe_string_from_description) + try: + db.get(cwe_id) + weaknesses.append(cwe_id) + except Exception: + logger.error("Invalid CWE id") + + seen = set() + unique_cwe = [x for x in weaknesses if not (x in seen or seen.add(x))] + return unique_cwe diff --git a/vulnerabilities/importers/debian.py b/vulnerabilities/importers/debian.py index 94057675f..ccce49634 100644 --- a/vulnerabilities/importers/debian.py +++ b/vulnerabilities/importers/debian.py @@ -8,12 +8,14 @@ # import logging +import re from typing import Any from typing import Iterable from typing import List from typing import Mapping import requests +from cwe2.database import Database from packageurl import PackageURL from univers.version_range import DebianVersionRange from univers.versions import 
DebianVersion @@ -23,6 +25,7 @@ from vulnerabilities.importer import Importer from vulnerabilities.importer import Reference from vulnerabilities.utils import dedupe +from vulnerabilities.utils import get_cwe_id from vulnerabilities.utils import get_item logger = logging.getLogger(__name__) @@ -93,6 +96,7 @@ def advisory_data(self) -> Iterable[AdvisoryData]: yield from self.parse(pkg_name, records) def parse(self, pkg_name: str, records: Mapping[str, Any]) -> Iterable[AdvisoryData]: + for cve_id, record in records.items(): affected_versions = [] fixed_versions = [] @@ -150,10 +154,38 @@ def parse(self, pkg_name: str, records: Mapping[str, Any]) -> Iterable[AdvisoryD fixed_version=DebianVersion(fixed_version), ) ) + weaknesses = get_cwe_from_debian_advisory(record) + yield AdvisoryData( aliases=[cve_id], summary=record.get("description", ""), affected_packages=affected_packages, references=references, + weaknesses=weaknesses, url=self.api_url, ) + + +def get_cwe_from_debian_advisory(record): + """ + Extracts CWE ID strings from the given raw_data and returns a list of CWE IDs. + + >>> get_cwe_from_debian_advisory({"description":"PEAR HTML_QuickForm version 3.2.14 contains an eval injection (CWE-95) vulnerability in HTML_QuickForm's getSubmitValue method, HTML_QuickForm's validate method, HTML_QuickForm_hierselect's _setOptions method, HTML_QuickForm_element's _findValue method, HTML_QuickForm_element's _prepareValue method. that can result in Possible information disclosure, possible impact on data integrity and execution of arbitrary code. This attack appear to be exploitable via A specially crafted query string could be utilised, e.g. http://www.example.com/admin/add_practice_type_id[1]=fubar%27])%20OR%20die(%27OOK!%27);%20//&mode=live. 
This vulnerability appears to have been fixed in 3.2.15."}) + [95] + >>> get_cwe_from_debian_advisory({"description":"There is no WEAKNESS DATA"}) + [] + """ + description = record.get("description") or "" + pattern = r"CWE-\d+" + cwe_strings = re.findall(pattern, description) + weaknesses = [] + db = Database() + for cwe_string in cwe_strings: + if cwe_string: + cwe_id = get_cwe_id(cwe_string) + try: + db.get(cwe_id) + weaknesses.append(cwe_id) + except Exception: + logger.error("Invalid CWE id") + return weaknesses diff --git a/vulnerabilities/importers/fireeye.py b/vulnerabilities/importers/fireeye.py index f39ff6c45..65b74eee5 100644 --- a/vulnerabilities/importers/fireeye.py +++ b/vulnerabilities/importers/fireeye.py @@ -12,11 +12,14 @@ from typing import Iterable from typing import List +from cwe2.database import Database + from vulnerabilities.importer import AdvisoryData from vulnerabilities.importer import Importer from vulnerabilities.importer import Reference from vulnerabilities.utils import build_description from vulnerabilities.utils import dedupe +from vulnerabilities.utils import get_cwe_id logger = logging.getLogger(__name__) @@ -77,10 +80,13 @@ def parse_advisory_data(raw_data, file, base_path) -> AdvisoryData: disc_credits = md_dict.get("## Discovery Credits") # not used disc_timeline = md_dict.get("## Disclosure Timeline") # not used references = md_dict.get("## References") or [] + cwe_data = md_dict.get("## Common Weakness Enumeration") or [] + return AdvisoryData( aliases=get_aliases(database_id, cve_ref), summary=build_description(" ".join(summary), " ".join(description)), references=get_references(references), + weaknesses=get_weaknesses(cwe_data), url=advisory_url, ) @@ -140,3 +146,31 @@ def md_list_to_dict(md_list): else: md_dict[md_key].append(md_line) return md_dict + + +def get_weaknesses(cwe_data): + """ + Return the list of CWE IDs as integers from a list of weakness summaries, e.g., [379]. 
+ Extract the CWE strings from a list of weakness descriptions, + e.g., ["CWE-379: Creation of Temporary File in Directory with Insecure Permissions"], to obtain CWE IDs like CWE-379. + Remove the "CWE-" prefix from each CWE string and convert it to an integer (e.g., 379). + Then, check if the CWE ID exists in the CWE database. + """ + cwe_list = [] + for line in cwe_data: + cwe_ids = re.findall(r"CWE-\d+", line) + cwe_list.extend(cwe_ids) + + weaknesses = [] + db = Database() + + for cwe_string in cwe_list: + + if cwe_string: + cwe_id = get_cwe_id(cwe_string) + try: + db.get(cwe_id) + weaknesses.append(cwe_id) + except Exception: + logger.error("Invalid CWE id") + return weaknesses diff --git a/vulnerabilities/tests/test_debian.py b/vulnerabilities/tests/test_debian.py index ad21ef92a..35320ac84 100644 --- a/vulnerabilities/tests/test_debian.py +++ b/vulnerabilities/tests/test_debian.py @@ -6,13 +6,14 @@ # See https://github.com/nexB/vulnerablecode for support or download. # See https://aboutcode.org for more information about nexB OSS projects. 
# - import json import os +import re from unittest.mock import patch from vulnerabilities.importer import AdvisoryData from vulnerabilities.importers.debian import DebianImporter +from vulnerabilities.importers.debian import get_cwe_from_debian_advisory from vulnerabilities.improvers.default import DefaultImprover from vulnerabilities.improvers.valid_versions import DebianBasicImprover from vulnerabilities.tests import util_tests @@ -55,3 +56,38 @@ def test_debian_improver(mock_response): result.extend(inference) expected_file = os.path.join(TEST_DATA, f"debian-improver-expected.json") util_tests.check_results_against_json(result, expected_file) + + +def test_get_cwe_from_debian_advisories(): + record = { + "description": "Legion of the Bouncy Castle Legion of the Bouncy Castle Java Cryptography APIs 1.58 up to but not including 1.60 contains a CWE-580: Use of Externally-Controlled Input to Select Classes or Code ('Unsafe Reflection') vulnerability in XMSS/XMSS^MT private key deserialization that can result in Deserializing an XMSS/XMSS^MT private key can result in the execution of unexpected code. This attack appear to be exploitable via A handcrafted private key can include references to unexpected classes which will be picked up from the class path for the executing application. 
This vulnerability appears to have been fixed in 1.60 and later.", + "scope": "local", + "releases": { + "bookworm": { + "status": "resolved", + "repositories": {"bookworm": "1.72-2"}, + "fixed_version": "1.60-1", + "urgency": "low", + }, + "bullseye": { + "status": "resolved", + "repositories": {"bullseye": "1.68-2"}, + "fixed_version": "1.60-1", + "urgency": "low", + }, + "sid": { + "status": "resolved", + "repositories": {"sid": "1.77-1"}, + "fixed_version": "1.60-1", + "urgency": "low", + }, + "trixie": { + "status": "resolved", + "repositories": {"trixie": "1.77-1"}, + "fixed_version": "1.60-1", + "urgency": "low", + }, + }, + } + result = get_cwe_from_debian_advisory(record) + assert result == [580] diff --git a/vulnerabilities/tests/test_fireeye.py b/vulnerabilities/tests/test_fireeye.py index 15935728c..94da186d9 100644 --- a/vulnerabilities/tests/test_fireeye.py +++ b/vulnerabilities/tests/test_fireeye.py @@ -13,6 +13,7 @@ from vulnerabilities.importer import Reference from vulnerabilities.importers.fireeye import get_aliases from vulnerabilities.importers.fireeye import get_references +from vulnerabilities.importers.fireeye import get_weaknesses from vulnerabilities.importers.fireeye import md_list_to_dict from vulnerabilities.importers.fireeye import parse_advisory_data from vulnerabilities.tests import util_tests @@ -217,3 +218,20 @@ def test_md_list_to_dict_2(self): md_list = f.readlines() md_dict = md_list_to_dict(md_list) assert md_dict == expected_output + + def test_get_weaknesses(self): + assert get_weaknesses( + [ + "CWE-379: Creation of Temporary File in Directory with Insecure Permissions", + "CWE-362: Concurrent Execution using Shared Resource with Improper Synchronization ('Race Condition')", + ] + ) == [379, 362] + + assert ( + get_weaknesses( + [ + "CWE-2345: This cwe id does not exist so it should generate Invalid CWE id error and return empty list." 
+ ] + ) + == [] + ) From 131f37c35b3a29e76b57298220957e8bcad81b2f Mon Sep 17 00:00:00 2001 From: ambuj Date: Sun, 15 Sep 2024 19:11:02 +0530 Subject: [PATCH 002/545] Refactor get_weaknesses function in apache_httpd importer Signed-off-by: ambuj --- vulnerabilities/importers/apache_httpd.py | 163 +++++++++++------- .../CVE-2021-44224-apache-httpd-expected.json | 2 +- .../CVE-2022-28614-apache-httpd-expected.json | 2 +- .../apache-httpd-improver-expected.json | 4 +- 4 files changed, 105 insertions(+), 66 deletions(-) diff --git a/vulnerabilities/importers/apache_httpd.py b/vulnerabilities/importers/apache_httpd.py index 75e8c8aae..3914f4aae 100644 --- a/vulnerabilities/importers/apache_httpd.py +++ b/vulnerabilities/importers/apache_httpd.py @@ -161,67 +161,106 @@ def fetch_links(url): def get_weaknesses(cve_data): - """ - Extract CWE IDs from CVE data. - - Args: - cve_data (dict): The CVE data in a dictionary format. - - Returns: - List[int]: A list of unique CWE IDs. - - >>> mock_cve_data = { - ... "containers": { - ... "cna": { - ... "providerMetadata": { - ... "orgId": "f0158376-9dc2-43b6-827c-5f631a4d8d09" - ... }, - ... "title": "mod_macro buffer over-read", - ... "problemTypes": [ - ... { - ... "descriptions": [ - ... { - ... "description": "CWE-125 Out-of-bounds Read", - ... "lang": "en", - ... "cweId": "CWE-125", - ... "type": "CWE" - ... } - ... ] - ... } - ... ] - ... } - ... } - ... } - >>> get_weaknesses(mock_cve_data) - [125] - """ - problem_types = cve_data.get("containers", {}).get("cna", {}).get("problemTypes", []) - descriptions = problem_types[0].get("descriptions", []) if len(problem_types) > 0 else [] - cwe_string = descriptions[0].get("cweId", "") if len(descriptions) > 0 else "" - cwe_pattern = r"CWE-\d+" - description = descriptions[0].get("description", "") if len(descriptions) > 0 else "" - matches = re.findall(cwe_pattern, description) + # """ + # Extract CWE IDs from CVE data. 
+ + # Args: + # cve_data (dict): The CVE data in a dictionary format. + + # Returns: + # List[int]: A list of unique CWE IDs. + + # Examples: + # >>> mock_cve_data1 = { + # ... "containers": { + # ... "cna": { + # ... "providerMetadata": { + # ... "orgId": "f0158376-9dc2-43b6-827c-5f631a4d8d09" + # ... }, + # ... "title": "mod_macro buffer over-read", + # ... "problemTypes": [ + # ... { + # ... "descriptions": [ + # ... { + # ... "description": "CWE-125 Out-of-bounds Read", + # ... "lang": "en", + # ... "cweId": "CWE-125", + # ... "type": "CWE" + # ... } + # ... ] + # ... } + # ... ] + # ... } + # ... } + # ... } + # >>> mock_cve_data2 = { + # ... "data_type": "CVE", + # ... "data_format": "MITRE", + # ... "data_version": "4.0", + # ... "generator": { + # ... "engine": "Vulnogram 0.0.9" + # ... }, + # ... "CVE_data_meta": { + # ... "ID": "CVE-2022-28614", + # ... "ASSIGNER": "security@apache.org", + # ... "TITLE": "read beyond bounds via ap_rwrite() ", + # ... "STATE": "PUBLIC" + # ... }, + # ... "problemtype": { + # ... "problemtype_data": [ + # ... { + # ... "description": [ + # ... { + # ... "lang": "eng", + # ... "value": "CWE-190 Integer Overflow or Wraparound" + # ... } + # ... ] + # ... }, + # ... { + # ... "description": [ + # ... { + # ... "lang": "eng", + # ... "value": "CWE-200 Exposure of Sensitive Information to an Unauthorized Actor" + # ... } + # ... ] + # ... } + # ... ] + # ... } + # ... 
} + + # >>> get_weaknesses(mock_cve_data1) + # [125] + + # >>> get_weaknesses(mock_cve_data2) + # [190, 200] + # """ + + alias = get_item(cve_data, "CVE_data_meta", "ID") + cwe_id = [] db = Database() + if alias: + problemtype_data = get_item(cve_data, "problemtype", "problemtype_data") or [] + for problem in problemtype_data: + for desc in problem["description"]: + value = desc.get("value", "") + cwe_pattern = r"CWE-\d+" + cwe_id_string_list = re.findall(cwe_pattern, value) + for cwe_id_string in cwe_id_string_list: + cwe_id.append(get_cwe_id(cwe_id_string)) + + else: + problemTypes = cve_data.get("containers", {}).get("cna", {}).get("problemTypes", []) + descriptions = problemTypes[0].get("descriptions", []) if len(problemTypes) > 0 else [] + for description in descriptions: + cwe_id_string = description.get("cweId", "") + cwe_id.append(get_cwe_id(cwe_id_string)) + weaknesses = [] - cwe_string_from_description = "" - if matches: - cwe_string_from_description = matches[0] - if cwe_string or cwe_string_from_description: - if cwe_string: - cwe_id = get_cwe_id(cwe_string) - try: - db.get(cwe_id) - weaknesses.append(cwe_id) - except Exception: - logger.error("Invalid CWE id") - elif cwe_string_from_description: - cwe_id = get_cwe_id(cwe_string_from_description) - try: - db.get(cwe_id) - weaknesses.append(cwe_id) - except Exception: - logger.error("Invalid CWE id") - - seen = set() - unique_cwe = [x for x in weaknesses if not (x in seen or seen.add(x))] - return unique_cwe + for cwe in cwe_id: + try: + db.get(cwe) + weaknesses.append(cwe) + except Exception: + logger.error("Invalid CWE id") + + return weaknesses diff --git a/vulnerabilities/tests/test_data/apache_httpd/CVE-2021-44224-apache-httpd-expected.json b/vulnerabilities/tests/test_data/apache_httpd/CVE-2021-44224-apache-httpd-expected.json index 6f8dee434..60385bae4 100644 --- a/vulnerabilities/tests/test_data/apache_httpd/CVE-2021-44224-apache-httpd-expected.json +++ 
b/vulnerabilities/tests/test_data/apache_httpd/CVE-2021-44224-apache-httpd-expected.json @@ -32,6 +32,6 @@ } ], "date_published": null, - "weaknesses": [], + "weaknesses": [476], "url": "https://httpd.apache.org/security/json/CVE-2021-44224.json" } \ No newline at end of file diff --git a/vulnerabilities/tests/test_data/apache_httpd/CVE-2022-28614-apache-httpd-expected.json b/vulnerabilities/tests/test_data/apache_httpd/CVE-2022-28614-apache-httpd-expected.json index dfbddbc8b..781a9a84b 100644 --- a/vulnerabilities/tests/test_data/apache_httpd/CVE-2022-28614-apache-httpd-expected.json +++ b/vulnerabilities/tests/test_data/apache_httpd/CVE-2022-28614-apache-httpd-expected.json @@ -32,6 +32,6 @@ } ], "date_published": null, - "weaknesses": [], + "weaknesses": [190, 200], "url": "https://httpd.apache.org/security/json/CVE-2022-28614.json" } \ No newline at end of file diff --git a/vulnerabilities/tests/test_data/apache_httpd/apache-httpd-improver-expected.json b/vulnerabilities/tests/test_data/apache_httpd/apache-httpd-improver-expected.json index 2aeb3dd70..5c46fe2e6 100644 --- a/vulnerabilities/tests/test_data/apache_httpd/apache-httpd-improver-expected.json +++ b/vulnerabilities/tests/test_data/apache_httpd/apache-httpd-improver-expected.json @@ -54,7 +54,7 @@ ] } ], - "weaknesses": [] + "weaknesses": [476] }, { "vulnerability_id": null, @@ -103,6 +103,6 @@ ] } ], - "weaknesses": [] + "weaknesses": [476] } ] \ No newline at end of file From f079171aad4fab9085ae1e285bbb20bbe94a1be4 Mon Sep 17 00:00:00 2001 From: ambuj Date: Tue, 8 Oct 2024 23:57:08 +0530 Subject: [PATCH 003/545] add docstring in fireeye Signed-off-by: ambuj --- vulnerabilities/importers/apache_httpd.py | 136 +++++++++++----------- vulnerabilities/importers/fireeye.py | 10 +- 2 files changed, 74 insertions(+), 72 deletions(-) diff --git a/vulnerabilities/importers/apache_httpd.py b/vulnerabilities/importers/apache_httpd.py index 3914f4aae..b6501daad 100644 --- 
a/vulnerabilities/importers/apache_httpd.py +++ b/vulnerabilities/importers/apache_httpd.py @@ -161,79 +161,79 @@ def fetch_links(url): def get_weaknesses(cve_data): - # """ - # Extract CWE IDs from CVE data. + """ + Extract CWE IDs from CVE data. - # Args: - # cve_data (dict): The CVE data in a dictionary format. + Args: + cve_data (dict): The CVE data in a dictionary format. - # Returns: - # List[int]: A list of unique CWE IDs. + Returns: + List[int]: A list of unique CWE IDs. - # Examples: - # >>> mock_cve_data1 = { - # ... "containers": { - # ... "cna": { - # ... "providerMetadata": { - # ... "orgId": "f0158376-9dc2-43b6-827c-5f631a4d8d09" - # ... }, - # ... "title": "mod_macro buffer over-read", - # ... "problemTypes": [ - # ... { - # ... "descriptions": [ - # ... { - # ... "description": "CWE-125 Out-of-bounds Read", - # ... "lang": "en", - # ... "cweId": "CWE-125", - # ... "type": "CWE" - # ... } - # ... ] - # ... } - # ... ] - # ... } - # ... } - # ... } - # >>> mock_cve_data2 = { - # ... "data_type": "CVE", - # ... "data_format": "MITRE", - # ... "data_version": "4.0", - # ... "generator": { - # ... "engine": "Vulnogram 0.0.9" - # ... }, - # ... "CVE_data_meta": { - # ... "ID": "CVE-2022-28614", - # ... "ASSIGNER": "security@apache.org", - # ... "TITLE": "read beyond bounds via ap_rwrite() ", - # ... "STATE": "PUBLIC" - # ... }, - # ... "problemtype": { - # ... "problemtype_data": [ - # ... { - # ... "description": [ - # ... { - # ... "lang": "eng", - # ... "value": "CWE-190 Integer Overflow or Wraparound" - # ... } - # ... ] - # ... }, - # ... { - # ... "description": [ - # ... { - # ... "lang": "eng", - # ... "value": "CWE-200 Exposure of Sensitive Information to an Unauthorized Actor" - # ... } - # ... ] - # ... } - # ... ] - # ... } - # ... } + Examples: + >>> mock_cve_data1 = { + ... "containers": { + ... "cna": { + ... "providerMetadata": { + ... "orgId": "f0158376-9dc2-43b6-827c-5f631a4d8d09" + ... }, + ... 
"title": "mod_macro buffer over-read", + ... "problemTypes": [ + ... { + ... "descriptions": [ + ... { + ... "description": "CWE-125 Out-of-bounds Read", + ... "lang": "en", + ... "cweId": "CWE-125", + ... "type": "CWE" + ... } + ... ] + ... } + ... ] + ... } + ... } + ... } + >>> mock_cve_data2 = { + ... "data_type": "CVE", + ... "data_format": "MITRE", + ... "data_version": "4.0", + ... "generator": { + ... "engine": "Vulnogram 0.0.9" + ... }, + ... "CVE_data_meta": { + ... "ID": "CVE-2022-28614", + ... "ASSIGNER": "security@apache.org", + ... "TITLE": "read beyond bounds via ap_rwrite() ", + ... "STATE": "PUBLIC" + ... }, + ... "problemtype": { + ... "problemtype_data": [ + ... { + ... "description": [ + ... { + ... "lang": "eng", + ... "value": "CWE-190 Integer Overflow or Wraparound" + ... } + ... ] + ... }, + ... { + ... "description": [ + ... { + ... "lang": "eng", + ... "value": "CWE-200 Exposure of Sensitive Information to an Unauthorized Actor" + ... } + ... ] + ... } + ... ] + ... } + ... } - # >>> get_weaknesses(mock_cve_data1) - # [125] + >>> get_weaknesses(mock_cve_data1) + [125] - # >>> get_weaknesses(mock_cve_data2) - # [190, 200] - # """ + >>> get_weaknesses(mock_cve_data2) + [190, 200] + """ alias = get_item(cve_data, "CVE_data_meta", "ID") cwe_id = [] diff --git a/vulnerabilities/importers/fireeye.py b/vulnerabilities/importers/fireeye.py index de9d8fddd..69ce84176 100644 --- a/vulnerabilities/importers/fireeye.py +++ b/vulnerabilities/importers/fireeye.py @@ -151,10 +151,12 @@ def md_list_to_dict(md_list): def get_weaknesses(cwe_data): """ Return the list of CWE IDs as integers from a list of weakness summaries, e.g., [379]. - Extract the CWE strings from a list of weakness descriptions, - e.g., ["CWE-379: Creation of Temporary File in Directory with Insecure Permissions"], to obtain CWE IDs like CWE-379. - Remove the "CWE-" prefix from each CWE string and convert it to an integer (e.g., 379). 
- Then, check if the CWE ID exists in the CWE database. + + >>> get_weaknesses([ + ... "CWE-379: Creation of Temporary File in Directory with Insecure Permissions", + ... "CWE-362: Concurrent Execution using Shared Resource with Improper Synchronization ('Race Condition')" + ... ]) + [379, 362] """ cwe_list = [] for line in cwe_data: From 530cb52e124c6b1e9765d269d7b233c94bb2f42f Mon Sep 17 00:00:00 2001 From: ziadhany Date: Sat, 9 Nov 2024 02:28:00 +0200 Subject: [PATCH 004/545] Add exploitability and weighted_severity fields to the Vulnerability model. Create a pipeline for vulnerability risk assessment. Signed-off-by: ziad hany --- vulnerabilities/api.py | 3 + ...7_vulnerability_exploitability_and_more.py | 33 +++++++++++ vulnerabilities/models.py | 29 ++++++++++ .../pipelines/compute_package_risk.py | 56 ++++++++++++++++++- vulnerabilities/risk.py | 30 +++++++--- .../templates/vulnerability_details.html | 32 +++++++++++ .../pipelines/test_compute_package_risk.py | 3 +- vulnerabilities/tests/test_api.py | 6 ++ vulnerabilities/tests/test_risk.py | 3 +- 9 files changed, 183 insertions(+), 12 deletions(-) create mode 100644 vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py diff --git a/vulnerabilities/api.py b/vulnerabilities/api.py index b8bb703a6..cb8e68976 100644 --- a/vulnerabilities/api.py +++ b/vulnerabilities/api.py @@ -251,6 +251,9 @@ class Meta: "weaknesses", "exploits", "severity_range_score", + "exploitability", + "weighted_severity", + "risk_score", ] diff --git a/vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py b/vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py new file mode 100644 index 000000000..fccaafa0b --- /dev/null +++ b/vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py @@ -0,0 +1,33 @@ +# Generated by Django 4.2.16 on 2024-11-08 14:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + 
("vulnerabilities", "0076_alter_packagechangelog_software_version_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="vulnerability", + name="exploitability", + field=models.DecimalField( + decimal_places=2, + help_text="Exploitability refers to the potential or probability of a software package vulnerability being \n exploited by malicious actors to compromise systems, applications, or networks. \n It is determined automatically by the discovery of exploits.", + max_digits=4, + null=True, + ), + ), + migrations.AddField( + model_name="vulnerability", + name="weighted_severity", + field=models.DecimalField( + decimal_places=2, + help_text="Weighted Severity is the maximum value obtained when each Severity is multiplied by its associated Weight/10.", + max_digits=4, + null=True, + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index b95a07297..9eb466d71 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -202,6 +202,35 @@ class Vulnerability(models.Model): choices=VulnerabilityStatusType.choices, default=VulnerabilityStatusType.PUBLISHED ) + exploitability = models.DecimalField( + null=True, + max_digits=4, + decimal_places=2, + help_text="""Exploitability refers to the potential or probability of a software package vulnerability being + exploited by malicious actors to compromise systems, applications, or networks. + It is determined automatically by the discovery of exploits.""", + ) + + weighted_severity = models.DecimalField( + null=True, + max_digits=4, + decimal_places=2, + help_text="Weighted Severity is the maximum value obtained when each Severity is multiplied by its associated Weight/10.", + ) + + @property + def risk_score(self): + """ + Risk expressed as a number ranging from 0 to 10. + Risk is calculated from weighted severity and exploitability values. 
+ It is the maximum value of (the weighted severity multiplied by its exploitability) or 10 + + Risk = min(weighted severity * exploitability, 10) + """ + if self.exploitability is not None and self.weighted_severity is not None: + return f"{min(float(self.exploitability) * float(self.weighted_severity), 10.0):.2f}" + return None + objects = VulnerabilityQuerySet.as_manager() class Meta: diff --git a/vulnerabilities/pipelines/compute_package_risk.py b/vulnerabilities/pipelines/compute_package_risk.py index e5b48ea0e..7ac179689 100644 --- a/vulnerabilities/pipelines/compute_package_risk.py +++ b/vulnerabilities/pipelines/compute_package_risk.py @@ -9,9 +9,12 @@ from aboutcode.pipeline import LoopProgress +from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Package +from vulnerabilities.models import Vulnerability from vulnerabilities.pipelines import VulnerableCodePipeline from vulnerabilities.risk import compute_package_risk +from vulnerabilities.risk import compute_vulnerability_risk class ComputePackageRiskPipeline(VulnerableCodePipeline): @@ -26,7 +29,44 @@ class ComputePackageRiskPipeline(VulnerableCodePipeline): @classmethod def steps(cls): - return (cls.add_package_risk_score,) + return (cls.add_vulnerability_risk_score, cls.add_package_risk_score) + + def add_vulnerability_risk_score(self): + affected_vulnerabilities = Vulnerability.objects.filter( + affectedbypackagerelatedvulnerability__isnull=False + ) + + self.log( + f"Calculating risk for {affected_vulnerabilities.count():,d} vulnerability with a affected packages records" + ) + + progress = LoopProgress(total_iterations=affected_vulnerabilities.count(), logger=self.log) + + updatables = [] + updated_vulnerability_count = 0 + batch_size = 5000 + + for vulnerability in progress.iter(affected_vulnerabilities.paginated()): + + vulnerability = compute_vulnerability_risk(vulnerability) + + if not vulnerability: + continue + + 
updatables.append(vulnerability) + + if len(updatables) >= batch_size: + updated_vulnerability_count += bulk_update_vulnerability_risk_score( + vulnerabilities=updatables, + logger=self.log, + ) + updated_vulnerability_count += bulk_update_vulnerability_risk_score( + vulnerabilities=updatables, + logger=self.log, + ) + self.log( + f"Successfully added risk score for {updated_vulnerability_count:,d} vulnerability" + ) def add_package_risk_score(self): affected_packages = Package.objects.filter( @@ -72,3 +112,17 @@ def bulk_update_package_risk_score(packages, logger): logger(f"Error updating packages: {e}") packages.clear() return package_count + + +def bulk_update_vulnerability_risk_score(vulnerabilities, logger): + vulnerabilities_count = 0 + if vulnerabilities: + try: + Vulnerability.objects.bulk_update( + objs=vulnerabilities, fields=["weighted_severity", "exploitability"] + ) + vulnerabilities_count += len(vulnerabilities) + except Exception as e: + logger(f"Error updating vulnerability: {e}") + vulnerabilities.clear() + return vulnerabilities_count diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index 9eb4ac6ec..bc06e898c 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -92,19 +92,31 @@ def get_exploitability_level(exploits, references, severities): def compute_vulnerability_risk(vulnerability: Vulnerability): """ - Risk may be expressed as a number ranging from 0 to 10. - Risk is calculated from weighted severity and exploitability values. - It is the maximum value of (the weighted severity multiplied by its exploitability) or 10 + Computes the risk score for a given vulnerability. - Risk = min(weighted severity * exploitability, 10) + Risk is expressed as a number ranging from 0 to 10 and is calculated based on: + - Weighted severity: a value derived from the associated severities of the vulnerability. + - Exploitability: a measure of how easily the vulnerability can be exploited. 
+ + The risk score is computed as: + Risk = min(weighted_severity * exploitability, 10) + + Args: + vulnerability (Vulnerability): The vulnerability object to compute the risk for. + + Returns: + Vulnerability: The updated vulnerability object with computed risk-related attributes. + + Notes: + - If there are no associated references, severities, or exploits, the computation is skipped. """ references = vulnerability.references severities = vulnerability.severities.select_related("reference") exploits = Exploit.objects.filter(vulnerability=vulnerability) if references.exists() or severities.exists() or exploits.exists(): - weighted_severity = get_weighted_severity(severities) - exploitability = get_exploitability_level(exploits, references, severities) - return min(weighted_severity * exploitability, 10) + vulnerability.weighted_severity = get_weighted_severity(severities) + vulnerability.exploitability = get_exploitability_level(exploits, references, severities) + return vulnerability def compute_package_risk(package: Package): @@ -117,8 +129,8 @@ def compute_package_risk(package: Package): for pkg_related_vul in AffectedByPackageRelatedVulnerability.objects.filter( package=package ).prefetch_related("vulnerability"): - if risk := compute_vulnerability_risk(pkg_related_vul.vulnerability): - result.append(risk) + if risk := pkg_related_vul.vulnerability.risk_score: + result.append(float(risk)) if not result: return diff --git a/vulnerabilities/templates/vulnerability_details.html b/vulnerabilities/templates/vulnerability_details.html index d1f2fb6de..ed9f38a7a 100644 --- a/vulnerabilities/templates/vulnerability_details.html +++ b/vulnerabilities/templates/vulnerability_details.html @@ -121,6 +121,38 @@ Status {{ status }} + + + + Exploitability + + {{ vulnerability.exploitability }} + + + + + Weighted Severity + + {{ vulnerability.weighted_severity }} + + + + + Risk + + {{ vulnerability.risk_score }} + + + diff --git 
a/vulnerabilities/tests/pipelines/test_compute_package_risk.py b/vulnerabilities/tests/pipelines/test_compute_package_risk.py index 7c197e812..1e828d58f 100644 --- a/vulnerabilities/tests/pipelines/test_compute_package_risk.py +++ b/vulnerabilities/tests/pipelines/test_compute_package_risk.py @@ -6,6 +6,7 @@ # See https://github.com/aboutcode-org/vulnerablecode for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # +from decimal import Decimal import pytest @@ -30,4 +31,4 @@ def test_simple_risk_pipeline(vulnerability): improver.execute() pkg = Package.objects.get(type="pypi", name="foo", version="2.3.0") - assert str(pkg.risk_score) == str(3.11) + assert f"{pkg.risk_score:.2f}" == "3.10" diff --git a/vulnerabilities/tests/test_api.py b/vulnerabilities/tests/test_api.py index 926d3c219..c21451e78 100644 --- a/vulnerabilities/tests/test_api.py +++ b/vulnerabilities/tests/test_api.py @@ -300,6 +300,9 @@ def test_api_with_single_vulnerability(self): }, ], "exploits": [], + "risk_score": None, + "exploitability": None, + "weighted_severity": None, } def test_api_with_single_vulnerability_with_filters(self): @@ -346,6 +349,9 @@ def test_api_with_single_vulnerability_with_filters(self): }, ], "exploits": [], + "risk_score": None, + "exploitability": None, + "weighted_severity": None, } diff --git a/vulnerabilities/tests/test_risk.py b/vulnerabilities/tests/test_risk.py index 96d9f9445..5b0aa4c5f 100644 --- a/vulnerabilities/tests/test_risk.py +++ b/vulnerabilities/tests/test_risk.py @@ -170,4 +170,5 @@ def test_get_weighted_severity(vulnerability): @pytest.mark.django_db def test_compute_vulnerability_risk(vulnerability): - assert compute_vulnerability_risk(vulnerability) == 3.1050000000000004 + vulnerability = compute_vulnerability_risk(vulnerability) + assert vulnerability.risk_score == str(3.11) From 86f69275735b68540293bd3c48ec2f512d15853c Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 12 Nov 2024 
17:50:09 +0200 Subject: [PATCH 005/545] Optimize performance, refactor, and rename the add_vulnerability_risk_score function. Rename the help text for the model. Signed-off-by: ziad hany --- ...7_vulnerability_exploitability_and_more.py | 8 ++-- vulnerabilities/models.py | 12 ++--- .../pipelines/compute_package_risk.py | 28 ++++++----- vulnerabilities/risk.py | 44 +++++++++--------- .../templates/vulnerability_details.html | 8 ++-- .../pipelines/test_compute_package_risk.py | 2 +- vulnerabilities/tests/test_risk.py | 46 +++++++++++++++++-- 7 files changed, 96 insertions(+), 52 deletions(-) diff --git a/vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py b/vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py index fccaafa0b..3a10bf4b3 100644 --- a/vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py +++ b/vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.16 on 2024-11-08 14:07 +# Generated by Django 4.2.16 on 2024-11-12 12:16 from django.db import migrations, models @@ -15,7 +15,9 @@ class Migration(migrations.Migration): name="exploitability", field=models.DecimalField( decimal_places=2, - help_text="Exploitability refers to the potential or probability of a software package vulnerability being \n exploited by malicious actors to compromise systems, applications, or networks. \n It is determined automatically by the discovery of exploits.", + help_text="""Exploitability indicates the likelihood that a vulnerability in a software package could + be used by malicious actors to compromise systems, applications, or networks. 
+ This metric is determined automatically based on the discovery of known exploits.""", max_digits=4, null=True, ), @@ -25,7 +27,7 @@ class Migration(migrations.Migration): name="weighted_severity", field=models.DecimalField( decimal_places=2, - help_text="Weighted Severity is the maximum value obtained when each Severity is multiplied by its associated Weight/10.", + help_text="Weighted severity is the highest value calculated by multiplying each severity by its corresponding weight, divided by 10.", max_digits=4, null=True, ), diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 9eb466d71..689a7c992 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -206,16 +206,16 @@ class Vulnerability(models.Model): null=True, max_digits=4, decimal_places=2, - help_text="""Exploitability refers to the potential or probability of a software package vulnerability being - exploited by malicious actors to compromise systems, applications, or networks. - It is determined automatically by the discovery of exploits.""", + help_text=""""Exploitability indicates the likelihood that a vulnerability in a software package could be used + by malicious actors to compromise systems, applications, or networks. 
+ This metric is determined automatically based on the discovery of known exploits.""", ) weighted_severity = models.DecimalField( null=True, max_digits=4, decimal_places=2, - help_text="Weighted Severity is the maximum value obtained when each Severity is multiplied by its associated Weight/10.", + help_text="Weighted severity is the highest value calculated by multiplying each severity by its corresponding weight, divided by 10.", ) @property @@ -228,8 +228,8 @@ def risk_score(self): Risk = min(weighted severity * exploitability, 10) """ if self.exploitability is not None and self.weighted_severity is not None: - return f"{min(float(self.exploitability) * float(self.weighted_severity), 10.0):.2f}" - return None + risk_score = min(float(self.exploitability) * float(self.weighted_severity), 10.0) + return f"{risk_score:.2f}".rstrip("0").rstrip(".") objects = VulnerabilityQuerySet.as_manager() diff --git a/vulnerabilities/pipelines/compute_package_risk.py b/vulnerabilities/pipelines/compute_package_risk.py index 7ac179689..57409a22e 100644 --- a/vulnerabilities/pipelines/compute_package_risk.py +++ b/vulnerabilities/pipelines/compute_package_risk.py @@ -9,12 +9,11 @@ from aboutcode.pipeline import LoopProgress -from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Package from vulnerabilities.models import Vulnerability from vulnerabilities.pipelines import VulnerableCodePipeline from vulnerabilities.risk import compute_package_risk -from vulnerabilities.risk import compute_vulnerability_risk +from vulnerabilities.risk import compute_vulnerability_risk_factors class ComputePackageRiskPipeline(VulnerableCodePipeline): @@ -29,11 +28,16 @@ class ComputePackageRiskPipeline(VulnerableCodePipeline): @classmethod def steps(cls): - return (cls.add_vulnerability_risk_score, cls.add_package_risk_score) + return ( + cls.compute_and_store_vulnerability_risk_score, + cls.compute_and_store_package_risk_score, + ) - def 
add_vulnerability_risk_score(self): - affected_vulnerabilities = Vulnerability.objects.filter( - affectedbypackagerelatedvulnerability__isnull=False + def compute_and_store_vulnerability_risk_score(self): + affected_vulnerabilities = ( + Vulnerability.objects.filter(affectedbypackagerelatedvulnerability__isnull=False) + .prefetch_related("references") + .only("references", "exploits") ) self.log( @@ -47,11 +51,13 @@ def add_vulnerability_risk_score(self): batch_size = 5000 for vulnerability in progress.iter(affected_vulnerabilities.paginated()): + references = vulnerability.references + severities = vulnerability.severities.select_related("reference") - vulnerability = compute_vulnerability_risk(vulnerability) - - if not vulnerability: - continue + ( + vulnerability.weighted_severity, + vulnerability.exploitability, + ) = compute_vulnerability_risk_factors(references, severities, vulnerability.exploits) updatables.append(vulnerability) @@ -68,7 +74,7 @@ def add_vulnerability_risk_score(self): f"Successfully added risk score for {updated_vulnerability_count:,d} vulnerability" ) - def add_package_risk_score(self): + def compute_and_store_package_risk_score(self): affected_packages = Package.objects.filter( affected_by_vulnerabilities__isnull=False ).distinct() diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index bc06e898c..d733f12c6 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -10,6 +10,8 @@ from urllib.parse import urlparse +from django.db.models import Prefetch + from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Exploit from vulnerabilities.models import Package @@ -27,6 +29,8 @@ def get_weighted_severity(severities): by its associated Weight/10. 
Example of Weighted Severity: max(7*(10/10), 8*(3/10), 6*(8/10)) = 7 """ + if not severities: + return 0 score_map = { "low": 3, @@ -90,33 +94,21 @@ def get_exploitability_level(exploits, references, severities): return exploit_level -def compute_vulnerability_risk(vulnerability: Vulnerability): +def compute_vulnerability_risk_factors(references, severities, exploits): """ - Computes the risk score for a given vulnerability. - - Risk is expressed as a number ranging from 0 to 10 and is calculated based on: - - Weighted severity: a value derived from the associated severities of the vulnerability. - - Exploitability: a measure of how easily the vulnerability can be exploited. - - The risk score is computed as: - Risk = min(weighted_severity * exploitability, 10) + Compute weighted severity and exploitability for a vulnerability. Args: - vulnerability (Vulnerability): The vulnerability object to compute the risk for. + references (list): References linked to the vulnerability. + severities (list): Severity levels of the vulnerability. + exploits (list): Exploit details for the vulnerability. Returns: - Vulnerability: The updated vulnerability object with computed risk-related attributes. - - Notes: - - If there are no associated references, severities, or exploits, the computation is skipped. + tuple: (weighted_severity, exploitability). 
""" - references = vulnerability.references - severities = vulnerability.severities.select_related("reference") - exploits = Exploit.objects.filter(vulnerability=vulnerability) - if references.exists() or severities.exists() or exploits.exists(): - vulnerability.weighted_severity = get_weighted_severity(severities) - vulnerability.exploitability = get_exploitability_level(exploits, references, severities) - return vulnerability + weighted_severity = get_weighted_severity(severities) + exploitability = get_exploitability_level(exploits, references, severities) + return weighted_severity, exploitability def compute_package_risk(package: Package): @@ -126,9 +118,15 @@ def compute_package_risk(package: Package): """ result = [] - for pkg_related_vul in AffectedByPackageRelatedVulnerability.objects.filter( + affected_pkg_related_vul = AffectedByPackageRelatedVulnerability.objects.filter( package=package - ).prefetch_related("vulnerability"): + ).prefetch_related( + Prefetch( + "vulnerability", + queryset=Vulnerability.objects.only("weighted_severity", "exploitability"), + ) + ) + for pkg_related_vul in affected_pkg_related_vul: if risk := pkg_related_vul.vulnerability.risk_score: result.append(float(risk)) diff --git a/vulnerabilities/templates/vulnerability_details.html b/vulnerabilities/templates/vulnerability_details.html index ed9f38a7a..03dd61749 100644 --- a/vulnerabilities/templates/vulnerability_details.html +++ b/vulnerabilities/templates/vulnerability_details.html @@ -124,9 +124,9 @@ + data-tooltip="Exploitability indicates the likelihood that a vulnerability in a software package could be used + by malicious actors to compromise systems, applications, or networks. 
+ This metric is determined automatically based on the discovery of known exploits."> Exploitability {{ vulnerability.exploitability }} @@ -135,7 +135,7 @@ Weighted Severity {{ vulnerability.weighted_severity }} diff --git a/vulnerabilities/tests/pipelines/test_compute_package_risk.py b/vulnerabilities/tests/pipelines/test_compute_package_risk.py index 1e828d58f..b8608cbc0 100644 --- a/vulnerabilities/tests/pipelines/test_compute_package_risk.py +++ b/vulnerabilities/tests/pipelines/test_compute_package_risk.py @@ -31,4 +31,4 @@ def test_simple_risk_pipeline(vulnerability): improver.execute() pkg = Package.objects.get(type="pypi", name="foo", version="2.3.0") - assert f"{pkg.risk_score:.2f}" == "3.10" + assert pkg.risk_score == Decimal("10") diff --git a/vulnerabilities/tests/test_risk.py b/vulnerabilities/tests/test_risk.py index 5b0aa4c5f..8155a11e8 100644 --- a/vulnerabilities/tests/test_risk.py +++ b/vulnerabilities/tests/test_risk.py @@ -15,7 +15,7 @@ from vulnerabilities.models import VulnerabilityRelatedReference from vulnerabilities.models import VulnerabilitySeverity from vulnerabilities.models import Weakness -from vulnerabilities.risk import compute_vulnerability_risk +from vulnerabilities.risk import compute_vulnerability_risk_factors from vulnerabilities.risk import get_exploitability_level from vulnerabilities.risk import get_weighted_severity from vulnerabilities.severity_systems import CVSSV3 @@ -169,6 +169,44 @@ def test_get_weighted_severity(vulnerability): @pytest.mark.django_db -def test_compute_vulnerability_risk(vulnerability): - vulnerability = compute_vulnerability_risk(vulnerability) - assert vulnerability.risk_score == str(3.11) +def test_compute_vulnerability_risk_factors(vulnerability): + assert compute_vulnerability_risk_factors( + vulnerability.references, vulnerability.severities, vulnerability.exploits + ) == (6.210000000000001, 2) + assert compute_vulnerability_risk_factors( + vulnerability.references, vulnerability.severities, None 
+ ) == ( + 6.210000000000001, + 0.5, + ) + assert compute_vulnerability_risk_factors( + vulnerability.references, None, vulnerability.exploits + ) == ( + 0, + 2, + ) + assert compute_vulnerability_risk_factors(None, None, None) == (0, 0.5) + + +@pytest.mark.django_db +def test_get_vulnerability_risk_score(vulnerability): + vulnerability.weighted_severity = 6.0 + vulnerability.exploitability = 2 + + assert vulnerability.risk_score == "10" # max risk_score can be reached + + vulnerability.weighted_severity = 6 + vulnerability.exploitability = 0.5 + assert vulnerability.risk_score == "3" + + vulnerability.weighted_severity = 5.6 + vulnerability.exploitability = 0.5 + assert vulnerability.risk_score == "2.8" + + vulnerability.weighted_severity = None + vulnerability.exploitability = 0.5 + assert vulnerability.risk_score is None + + vulnerability.weighted_severity = None + vulnerability.exploitability = None + assert vulnerability.risk_score is None From 405bf864f2424ca82cf8c1444005241ad189c226 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 12 Nov 2024 18:13:59 +0200 Subject: [PATCH 006/545] Resolve migration conflict Signed-off-by: ziad hany --- ..._more.py => 0078_vulnerability_exploitability_and_more.py} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename vulnerabilities/migrations/{0077_vulnerability_exploitability_and_more.py => 0078_vulnerability_exploitability_and_more.py} (91%) diff --git a/vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py b/vulnerabilities/migrations/0078_vulnerability_exploitability_and_more.py similarity index 91% rename from vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py rename to vulnerabilities/migrations/0078_vulnerability_exploitability_and_more.py index 3a10bf4b3..06311036d 100644 --- a/vulnerabilities/migrations/0077_vulnerability_exploitability_and_more.py +++ b/vulnerabilities/migrations/0078_vulnerability_exploitability_and_more.py @@ -1,4 +1,4 @@ -# Generated by 
Django 4.2.16 on 2024-11-12 12:16 +# Generated by Django 4.2.16 on 2024-11-12 16:03 from django.db import migrations, models @@ -6,7 +6,7 @@ class Migration(migrations.Migration): dependencies = [ - ("vulnerabilities", "0076_alter_packagechangelog_software_version_and_more"), + ("vulnerabilities", "0077_alter_packagechangelog_software_version_and_more"), ] operations = [ From ec5b972a825f787de83964c0ecd36b8a2d55d6a0 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Sat, 16 Nov 2024 22:54:25 +0200 Subject: [PATCH 007/545] Resolve migration conflict & add weighted_severity, exploitability to api_v2 Signed-off-by: ziad hany --- vulnerabilities/api_v2.py | 6 +++ ...8_vulnerability_exploitability_and_more.py | 35 --------------- ...2_vulnerability_exploitability_and_more.py | 43 +++++++++++++++++++ vulnerabilities/models.py | 32 +++++++++++++- .../pipelines/compute_package_risk.py | 26 ++++------- vulnerabilities/risk.py | 20 ++++----- vulnerabilities/tests/test_risk.py | 30 ++++++------- 7 files changed, 109 insertions(+), 83 deletions(-) delete mode 100644 vulnerabilities/migrations/0078_vulnerability_exploitability_and_more.py create mode 100644 vulnerabilities/migrations/0082_vulnerability_exploitability_and_more.py diff --git a/vulnerabilities/api_v2.py b/vulnerabilities/api_v2.py index b0a3fa125..58771c916 100644 --- a/vulnerabilities/api_v2.py +++ b/vulnerabilities/api_v2.py @@ -67,6 +67,9 @@ class VulnerabilityV2Serializer(serializers.ModelSerializer): weaknesses = WeaknessV2Serializer(many=True) references = VulnerabilityReferenceV2Serializer(many=True, source="vulnerabilityreference_set") severities = VulnerabilitySeverityV2Serializer(many=True) + exploitability = serializers.FloatField(read_only=True) + weighted_severity = serializers.FloatField(read_only=True) + risk_score = serializers.FloatField(read_only=True) class Meta: model = Vulnerability @@ -77,6 +80,9 @@ class Meta: "severities", "weaknesses", "references", + "exploitability", + "weighted_severity", + 
"risk_score", ] def get_aliases(self, obj): diff --git a/vulnerabilities/migrations/0078_vulnerability_exploitability_and_more.py b/vulnerabilities/migrations/0078_vulnerability_exploitability_and_more.py deleted file mode 100644 index 06311036d..000000000 --- a/vulnerabilities/migrations/0078_vulnerability_exploitability_and_more.py +++ /dev/null @@ -1,35 +0,0 @@ -# Generated by Django 4.2.16 on 2024-11-12 16:03 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("vulnerabilities", "0077_alter_packagechangelog_software_version_and_more"), - ] - - operations = [ - migrations.AddField( - model_name="vulnerability", - name="exploitability", - field=models.DecimalField( - decimal_places=2, - help_text="""Exploitability indicates the likelihood that a vulnerability in a software package could - be used by malicious actors to compromise systems, applications, or networks. - This metric is determined automatically based on the discovery of known exploits.""", - max_digits=4, - null=True, - ), - ), - migrations.AddField( - model_name="vulnerability", - name="weighted_severity", - field=models.DecimalField( - decimal_places=2, - help_text="Weighted severity is the highest value calculated by multiplying each severity by its corresponding weight, divided by 10.", - max_digits=4, - null=True, - ), - ), - ] diff --git a/vulnerabilities/migrations/0082_vulnerability_exploitability_and_more.py b/vulnerabilities/migrations/0082_vulnerability_exploitability_and_more.py new file mode 100644 index 000000000..f7517070a --- /dev/null +++ b/vulnerabilities/migrations/0082_vulnerability_exploitability_and_more.py @@ -0,0 +1,43 @@ +# Generated by Django 4.2.16 on 2024-11-16 20:41 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0081_alter_packagechangelog_software_version_and_more"), + ] + + operations = [ + migrations.AddField( + 
model_name="vulnerability", + name="exploitability", + field=models.DecimalField( + decimal_places=1, + help_text='"Exploitability indicates the likelihood that a vulnerability in a software package could be used \n by malicious actors to compromise systems, applications, or networks. \n This metric is determined automatically based on the discovery of known exploits.', + max_digits=2, + null=True, + ), + ), + migrations.AddField( + model_name="vulnerability", + name="weighted_severity", + field=models.DecimalField( + decimal_places=1, + help_text="Weighted severity is the highest value calculated by multiplying each severity by its corresponding weight, divided by 10", + max_digits=3, + null=True, + ), + ), + migrations.AlterField( + model_name="package", + name="risk_score", + field=models.DecimalField( + decimal_places=1, + help_text="Risk score between 0.00 and 10.00, where higher values indicate greater vulnerability risk for the package.", + max_digits=3, + null=True, + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index c62949992..8cca1c021 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -243,6 +243,34 @@ class Vulnerability(models.Model): related_name="vulnerabilities", ) + exploitability = models.DecimalField( + null=True, + max_digits=2, + decimal_places=1, + help_text=""""Exploitability indicates the likelihood that a vulnerability in a software package could be used + by malicious actors to compromise systems, applications, or networks. + This metric is determined automatically based on the discovery of known exploits.""", + ) + + weighted_severity = models.DecimalField( + null=True, + max_digits=3, + decimal_places=1, + help_text="Weighted severity is the highest value calculated by multiplying each severity by its corresponding weight, divided by 10", + ) + + @property + def risk_score(self): + """ + Risk expressed as a number ranging from 0 to 10. 
+ Risk is calculated from weighted severity and exploitability values. + It is the maximum value of (the weighted severity multiplied by its exploitability) or 10 + Risk = min(weighted severity * exploitability, 10) + """ + if self.exploitability and self.weighted_severity: + risk_score = min(float(self.exploitability * self.weighted_severity), 10.0) + return round(risk_score, 1) + objects = VulnerabilityQuerySet.as_manager() class Meta: @@ -672,8 +700,8 @@ class Package(PackageURLMixin): risk_score = models.DecimalField( null=True, - max_digits=4, - decimal_places=2, + max_digits=3, + decimal_places=1, help_text="Risk score between 0.00 and 10.00, where higher values " "indicate greater vulnerability risk for the package.", ) diff --git a/vulnerabilities/pipelines/compute_package_risk.py b/vulnerabilities/pipelines/compute_package_risk.py index 839d5241b..e8973b480 100644 --- a/vulnerabilities/pipelines/compute_package_risk.py +++ b/vulnerabilities/pipelines/compute_package_risk.py @@ -34,10 +34,11 @@ def steps(cls): ) def compute_and_store_vulnerability_risk_score(self): - affected_vulnerabilities = ( - Vulnerability.objects.filter(affectedbypackagerelatedvulnerability__isnull=False) - .prefetch_related("references") - .only("references", "exploits") + affected_vulnerabilities = Vulnerability.objects.filter( + affectedbypackagerelatedvulnerability__isnull=False + ).prefetch_related( + "references", + "exploits", ) self.log( @@ -51,8 +52,8 @@ def compute_and_store_vulnerability_risk_score(self): batch_size = 5000 for vulnerability in progress.iter(affected_vulnerabilities.paginated()): - references = vulnerability.references - severities = vulnerability.severities.select_related("reference") + severities = vulnerability.severities.all() + references = vulnerability.references.all() ( vulnerability.weighted_severity, @@ -76,17 +77,8 @@ def compute_and_store_vulnerability_risk_score(self): def compute_and_store_package_risk_score(self): affected_packages = ( - 
Package.objects.filter(affected_by_vulnerabilities__isnull=False).prefetch_related( - "affectedbypackagerelatedvulnerability_set__vulnerability", - "affectedbypackagerelatedvulnerability_set__vulnerability__references", - "affectedbypackagerelatedvulnerability_set__vulnerability__severities", - "affectedbypackagerelatedvulnerability_set__vulnerability__exploits", - ) - ).distinct() - - affected_packages = Package.objects.filter( - affected_by_vulnerabilities__isnull=False - ).distinct() + Package.objects.filter(affected_by_vulnerabilities__isnull=False).only("id").distinct() + ) self.log(f"Calculating risk for {affected_packages.count():,d} affected package records") diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index 5f5c480e5..6c28faa36 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -6,24 +6,22 @@ # See https://github.com/aboutcode-org/vulnerablecode for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # - - +from typing import List from urllib.parse import urlparse from django.db.models import Prefetch from vulnerabilities.models import AffectedByPackageRelatedVulnerability -from vulnerabilities.models import Exploit -from vulnerabilities.models import Package from vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilityReference +from vulnerabilities.models import VulnerabilitySeverity from vulnerabilities.severity_systems import EPSS from vulnerabilities.weight_config import WEIGHT_CONFIG DEFAULT_WEIGHT = 5 -def get_weighted_severity(severities): +def get_weighted_severity(severities: List[VulnerabilitySeverity]): """ Weighted Severity is the maximum value obtained when each Severity is multiplied by its associated Weight/10. 
@@ -57,7 +55,9 @@ def get_weighted_severity(severities): vul_score_value = score_map.get(vul_score, 0) * max_weight score_list.append(vul_score_value) - return max(score_list) if score_list else 0 + + max_score = max(score_list) if score_list else 0 + return round(max_score, 1) def get_exploitability_level(exploits, references, severities): @@ -99,12 +99,8 @@ def compute_vulnerability_risk_factors(references, severities, exploits): Risk = min(weighted severity * exploitability, 10) """ - severities = severities.all() - exploits = exploits.all() - reference = references.all() - weighted_severity = get_weighted_severity(severities) - exploitability = get_exploitability_level(exploits, reference, severities) + exploitability = get_exploitability_level(exploits, references, severities) return weighted_severity, exploitability @@ -130,4 +126,4 @@ def compute_package_risk(package): if not result: return - return f"{max(result):.2f}" + return round(max(result), 1) diff --git a/vulnerabilities/tests/test_risk.py b/vulnerabilities/tests/test_risk.py index 454fc3dd1..cbe1003b2 100644 --- a/vulnerabilities/tests/test_risk.py +++ b/vulnerabilities/tests/test_risk.py @@ -131,7 +131,7 @@ def test_exploitability_level( @pytest.mark.django_db def test_get_weighted_severity(vulnerability): severities = vulnerability.severities.all() - assert get_weighted_severity(severities) == 6.210000000000001 + assert get_weighted_severity(severities) == 6.2 severity2 = VulnerabilitySeverity.objects.create( url="https://security-tracker.debian.org/tracker/CVE-2019-13057", @@ -146,21 +146,17 @@ def test_get_weighted_severity(vulnerability): @pytest.mark.django_db def test_compute_vulnerability_risk_factors(vulnerability): - assert compute_vulnerability_risk_factors( - vulnerability.references, vulnerability.severities, vulnerability.exploits - ) == (6.210000000000001, 2) - assert compute_vulnerability_risk_factors( - vulnerability.references, vulnerability.severities, None - ) == ( - 
6.210000000000001, - 0.5, - ) - assert compute_vulnerability_risk_factors( - vulnerability.references, None, vulnerability.exploits - ) == ( - 0, + severities = vulnerability.severities.all() + references = vulnerability.references.all() + + assert compute_vulnerability_risk_factors(references, severities, vulnerability.exploits) == ( + 6.2, 2, ) + + assert compute_vulnerability_risk_factors(references, severities, None) == (6.2, 0.5) + assert compute_vulnerability_risk_factors(references, None, vulnerability.exploits) == (0, 2) + assert compute_vulnerability_risk_factors(None, None, None) == (0, 0.5) @@ -169,15 +165,15 @@ def test_get_vulnerability_risk_score(vulnerability): vulnerability.weighted_severity = 6.0 vulnerability.exploitability = 2 - assert vulnerability.risk_score == "10" # max risk_score can be reached + assert vulnerability.risk_score == 10.0 # max risk_score can be reached vulnerability.weighted_severity = 6 vulnerability.exploitability = 0.5 - assert vulnerability.risk_score == "3" + assert vulnerability.risk_score == 3.0 vulnerability.weighted_severity = 5.6 vulnerability.exploitability = 0.5 - assert vulnerability.risk_score == "2.8" + assert vulnerability.risk_score == 2.8 vulnerability.weighted_severity = None vulnerability.exploitability = 0.5 From 893183f169a58ea73370c5900489029a527d2fa3 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Sun, 17 Nov 2024 15:53:35 +0200 Subject: [PATCH 008/545] Correct help text and remove inconsistent typing. 
Signed-off-by: ziad hany --- .../0082_vulnerability_exploitability_and_more.py | 6 +++--- vulnerabilities/models.py | 7 +++---- vulnerabilities/risk.py | 4 +--- vulnerabilities/templates/vulnerability_details.html | 6 +++--- 4 files changed, 10 insertions(+), 13 deletions(-) diff --git a/vulnerabilities/migrations/0082_vulnerability_exploitability_and_more.py b/vulnerabilities/migrations/0082_vulnerability_exploitability_and_more.py index f7517070a..26a55e714 100644 --- a/vulnerabilities/migrations/0082_vulnerability_exploitability_and_more.py +++ b/vulnerabilities/migrations/0082_vulnerability_exploitability_and_more.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.16 on 2024-11-16 20:41 +# Generated by Django 4.2.16 on 2024-11-17 13:52 from django.db import migrations, models @@ -15,7 +15,7 @@ class Migration(migrations.Migration): name="exploitability", field=models.DecimalField( decimal_places=1, - help_text='"Exploitability indicates the likelihood that a vulnerability in a software package could be used \n by malicious actors to compromise systems, applications, or networks. \n This metric is determined automatically based on the discovery of known exploits.', + help_text="Exploitability indicates the likelihood that a vulnerability in a software package could be used by malicious actors to compromise systems, applications, or networks. 
This metric is determined automatically based on the discovery of known exploits.", max_digits=2, null=True, ), @@ -25,7 +25,7 @@ class Migration(migrations.Migration): name="weighted_severity", field=models.DecimalField( decimal_places=1, - help_text="Weighted severity is the highest value calculated by multiplying each severity by its corresponding weight, divided by 10", + help_text="Weighted severity is the highest value calculated by multiplying each severity by its corresponding weight, divided by 10.", max_digits=3, null=True, ), diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 8cca1c021..e5fe231f6 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -247,16 +247,15 @@ class Vulnerability(models.Model): null=True, max_digits=2, decimal_places=1, - help_text=""""Exploitability indicates the likelihood that a vulnerability in a software package could be used - by malicious actors to compromise systems, applications, or networks. - This metric is determined automatically based on the discovery of known exploits.""", + help_text="Exploitability indicates the likelihood that a vulnerability in a software package could be used by malicious actors to compromise systems, " + "applications, or networks. This metric is determined automatically based on the discovery of known exploits.", ) weighted_severity = models.DecimalField( null=True, max_digits=3, decimal_places=1, - help_text="Weighted severity is the highest value calculated by multiplying each severity by its corresponding weight, divided by 10", + help_text="Weighted severity is the highest value calculated by multiplying each severity by its corresponding weight, divided by 10.", ) @property diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index 6c28faa36..8ccbd0e3d 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -6,7 +6,6 @@ # See https://github.com/aboutcode-org/vulnerablecode for support or download. 
# See https://aboutcode.org for more information about nexB OSS projects. # -from typing import List from urllib.parse import urlparse from django.db.models import Prefetch @@ -14,14 +13,13 @@ from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilityReference -from vulnerabilities.models import VulnerabilitySeverity from vulnerabilities.severity_systems import EPSS from vulnerabilities.weight_config import WEIGHT_CONFIG DEFAULT_WEIGHT = 5 -def get_weighted_severity(severities: List[VulnerabilitySeverity]): +def get_weighted_severity(severities): """ Weighted Severity is the maximum value obtained when each Severity is multiplied by its associated Weight/10. diff --git a/vulnerabilities/templates/vulnerability_details.html b/vulnerabilities/templates/vulnerability_details.html index 7381c058c..e9e58c79e 100644 --- a/vulnerabilities/templates/vulnerability_details.html +++ b/vulnerabilities/templates/vulnerability_details.html @@ -124,9 +124,9 @@ + data-tooltip="Exploitability indicates the likelihood that a vulnerability in a software package + could be used by malicious actors to compromise systems, + applications, or networks. 
This metric is determined automatically based on the discovery of known exploits."> Exploitability {{ vulnerability.exploitability }} From f29ef16a91c1b53d9bdb436dfc9d91525dea7949 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Mon, 18 Nov 2024 16:21:25 +0200 Subject: [PATCH 009/545] Add severities in the prefetch and optimize the prefetching process for compute_and_store_package_risk_score Signed-off-by: ziad hany --- .../pipelines/compute_package_risk.py | 13 +++++++++++-- vulnerabilities/risk.py | 18 +++--------------- 2 files changed, 14 insertions(+), 17 deletions(-) diff --git a/vulnerabilities/pipelines/compute_package_risk.py b/vulnerabilities/pipelines/compute_package_risk.py index e8973b480..22b3305a6 100644 --- a/vulnerabilities/pipelines/compute_package_risk.py +++ b/vulnerabilities/pipelines/compute_package_risk.py @@ -8,6 +8,7 @@ # from aboutcode.pipeline import LoopProgress +from django.db.models import Prefetch from vulnerabilities.models import Package from vulnerabilities.models import Vulnerability @@ -38,6 +39,7 @@ def compute_and_store_vulnerability_risk_score(self): affectedbypackagerelatedvulnerability__isnull=False ).prefetch_related( "references", + "severities", "exploits", ) @@ -77,8 +79,15 @@ def compute_and_store_vulnerability_risk_score(self): def compute_and_store_package_risk_score(self): affected_packages = ( - Package.objects.filter(affected_by_vulnerabilities__isnull=False).only("id").distinct() - ) + Package.objects.filter(affected_by_vulnerabilities__isnull=False) + .only("id") + .prefetch_related( + Prefetch( + "affectedbypackagerelatedvulnerability_set__vulnerability", + queryset=Vulnerability.objects.only("weighted_severity", "exploitability"), + ), + ) + ).distinct() self.log(f"Calculating risk for {affected_packages.count():,d} affected package records") diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index 8ccbd0e3d..e6306a704 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -8,10 +8,6 @@ 
# from urllib.parse import urlparse -from django.db.models import Prefetch - -from vulnerabilities.models import AffectedByPackageRelatedVulnerability -from vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilityReference from vulnerabilities.severity_systems import EPSS from vulnerabilities.weight_config import WEIGHT_CONFIG @@ -107,18 +103,10 @@ def compute_package_risk(package): Calculate the risk for a package by iterating over all vulnerabilities that affects this package and determining the associated risk. """ - result = [] - affected_pkg_related_vul = AffectedByPackageRelatedVulnerability.objects.filter( - package=package - ).prefetch_related( - Prefetch( - "vulnerability", - queryset=Vulnerability.objects.only("weighted_severity", "exploitability"), - ) - ) - for pkg_related_vul in affected_pkg_related_vul: - if risk := pkg_related_vul.vulnerability.risk_score: + vulnerabilities = package.vulnerabilities.all() + for vulnerability in vulnerabilities: + if risk := vulnerability.risk_score: result.append(float(risk)) if not result: From 4920e1f71b6c6f8f05a24a3aeefbc3d3f00a3a58 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Mon, 18 Nov 2024 17:45:00 +0200 Subject: [PATCH 010/545] Refactor the risk score calculation for vulnerabilities and packages. Update the tests for exploits and the simple_risk_pipeline. 
Signed-off-by: ziad hany --- .../pipelines/compute_package_risk.py | 89 +++++++++---------- vulnerabilities/risk.py | 5 +- .../pipelines/test_compute_package_risk.py | 2 +- vulnerabilities/tests/test_risk.py | 7 +- 4 files changed, 51 insertions(+), 52 deletions(-) diff --git a/vulnerabilities/pipelines/compute_package_risk.py b/vulnerabilities/pipelines/compute_package_risk.py index 22b3305a6..7ac4de838 100644 --- a/vulnerabilities/pipelines/compute_package_risk.py +++ b/vulnerabilities/pipelines/compute_package_risk.py @@ -6,7 +6,6 @@ # See https://github.com/aboutcode-org/vulnerablecode for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # - from aboutcode.pipeline import LoopProgress from django.db.models import Prefetch @@ -35,12 +34,14 @@ def steps(cls): ) def compute_and_store_vulnerability_risk_score(self): - affected_vulnerabilities = Vulnerability.objects.filter( - affectedbypackagerelatedvulnerability__isnull=False - ).prefetch_related( - "references", - "severities", - "exploits", + affected_vulnerabilities = ( + Vulnerability.objects.filter(affecting_packages__isnull=False) + .prefetch_related( + "references", + "severities", + "exploits", + ) + .distinct() ) self.log( @@ -53,35 +54,43 @@ def compute_and_store_vulnerability_risk_score(self): updated_vulnerability_count = 0 batch_size = 5000 - for vulnerability in progress.iter(affected_vulnerabilities.paginated()): + for vulnerability in progress.iter(affected_vulnerabilities.paginated(per_page=batch_size)): severities = vulnerability.severities.all() references = vulnerability.references.all() + exploits = vulnerability.exploits.all() - ( - vulnerability.weighted_severity, - vulnerability.exploitability, - ) = compute_vulnerability_risk_factors(references, severities, vulnerability.exploits) + weighted_severity, exploitability = compute_vulnerability_risk_factors( + references=references, + severities=severities, + exploits=exploits, + ) + 
vulnerability.weighted_severity = weighted_severity + vulnerability.exploitability = exploitability updatables.append(vulnerability) if len(updatables) >= batch_size: - updated_vulnerability_count += bulk_update_vulnerability_risk_score( - vulnerabilities=updatables, + updated_vulnerability_count += bulk_update( + model=Vulnerability, + items=updatables, + fields=["weighted_severity", "exploitability"], logger=self.log, ) - updated_vulnerability_count += bulk_update_vulnerability_risk_score( - vulnerabilities=updatables, + + updated_vulnerability_count += bulk_update( + model=Vulnerability, + items=updatables, + fields=["weighted_severity", "exploitability"], logger=self.log, ) + self.log( f"Successfully added risk score for {updated_vulnerability_count:,d} vulnerability" ) def compute_and_store_package_risk_score(self): affected_packages = ( - Package.objects.filter(affected_by_vulnerabilities__isnull=False) - .only("id") - .prefetch_related( + Package.objects.filter(affected_by_vulnerabilities__isnull=False).prefetch_related( Prefetch( "affectedbypackagerelatedvulnerability_set__vulnerability", queryset=Vulnerability.objects.only("weighted_severity", "exploitability"), @@ -111,38 +120,28 @@ def compute_and_store_package_risk_score(self): updatables.append(package) if len(updatables) >= batch_size: - updated_package_count += bulk_update_package_risk_score( - packages=updatables, + updated_package_count += bulk_update( + model=Package, + items=updatables, + fields=["risk_score"], logger=self.log, ) - updated_package_count += bulk_update_package_risk_score( - packages=updatables, + updated_package_count += bulk_update( + model=Package, + items=updatables, + fields=["risk_score"], logger=self.log, ) self.log(f"Successfully added risk score for {updated_package_count:,d} package") -def bulk_update_package_risk_score(packages, logger): - package_count = 0 - if packages: - try: - Package.objects.bulk_update(objs=packages, fields=["risk_score"]) - package_count += 
len(packages) - except Exception as e: - logger(f"Error updating packages: {e}") - packages.clear() - return package_count - - -def bulk_update_vulnerability_risk_score(vulnerabilities, logger): - vulnerabilities_count = 0 - if vulnerabilities: +def bulk_update(model, items, fields, logger): + item_count = 0 + if items: try: - Vulnerability.objects.bulk_update( - objs=vulnerabilities, fields=["weighted_severity", "exploitability"] - ) - vulnerabilities_count += len(vulnerabilities) + model.objects.bulk_update(objs=items, fields=fields) + item_count += len(items) except Exception as e: - logger(f"Error updating vulnerability: {e}") - vulnerabilities.clear() - return vulnerabilities_count + logger(f"Error updating {model.__name__}: {e}") + items.clear() + return item_count diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index e6306a704..b38d50a4e 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -104,9 +104,8 @@ def compute_package_risk(package): and determining the associated risk. 
""" result = [] - vulnerabilities = package.vulnerabilities.all() - for vulnerability in vulnerabilities: - if risk := vulnerability.risk_score: + for vulnerability in package.affectedbypackagerelatedvulnerability_set.all(): + if risk := vulnerability.vulnerability.risk_score: result.append(float(risk)) if not result: diff --git a/vulnerabilities/tests/pipelines/test_compute_package_risk.py b/vulnerabilities/tests/pipelines/test_compute_package_risk.py index b8608cbc0..a366d32e8 100644 --- a/vulnerabilities/tests/pipelines/test_compute_package_risk.py +++ b/vulnerabilities/tests/pipelines/test_compute_package_risk.py @@ -31,4 +31,4 @@ def test_simple_risk_pipeline(vulnerability): improver.execute() pkg = Package.objects.get(type="pypi", name="foo", version="2.3.0") - assert pkg.risk_score == Decimal("10") + assert pkg.risk_score == Decimal("3.1") # max( 6.9 * 9/10 , 6.5 * 9/10 ) * .5 = 3.105 diff --git a/vulnerabilities/tests/test_risk.py b/vulnerabilities/tests/test_risk.py index cbe1003b2..420c8c402 100644 --- a/vulnerabilities/tests/test_risk.py +++ b/vulnerabilities/tests/test_risk.py @@ -145,17 +145,18 @@ def test_get_weighted_severity(vulnerability): @pytest.mark.django_db -def test_compute_vulnerability_risk_factors(vulnerability): +def test_compute_vulnerability_risk_factors(vulnerability, exploit): severities = vulnerability.severities.all() references = vulnerability.references.all() - assert compute_vulnerability_risk_factors(references, severities, vulnerability.exploits) == ( + assert compute_vulnerability_risk_factors(references, severities, exploit) == ( 6.2, 2, ) assert compute_vulnerability_risk_factors(references, severities, None) == (6.2, 0.5) - assert compute_vulnerability_risk_factors(references, None, vulnerability.exploits) == (0, 2) + + assert compute_vulnerability_risk_factors(references, None, exploit) == (0, 2) assert compute_vulnerability_risk_factors(None, None, None) == (0, 0.5) From 1a9df9b6501f5f5d270eacf3c61827eae6d3a579 Mon Sep 17 
00:00:00 2001 From: ziadhany Date: Mon, 18 Nov 2024 21:28:20 +0200 Subject: [PATCH 011/545] Rename vulnerability to relation for clarity in compute_package_risk Signed-off-by: ziad hany --- vulnerabilities/risk.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index b38d50a4e..a4508a03f 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -104,8 +104,8 @@ def compute_package_risk(package): and determining the associated risk. """ result = [] - for vulnerability in package.affectedbypackagerelatedvulnerability_set.all(): - if risk := vulnerability.vulnerability.risk_score: + for relation in package.affectedbypackagerelatedvulnerability_set.all(): + if risk := relation.vulnerability.risk_score: result.append(float(risk)) if not result: From b7b9ab779d53278cb03d4f7137cbe9890327d6ea Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 15 Nov 2024 16:40:34 +0530 Subject: [PATCH 012/545] Avoid migrations on version bumps Signed-off-by: Keshav Priyadarshi --- ...kagechangelog_software_version_and_more.py | 27 ++++++++ vulnerabilities/models.py | 3 +- vulnerabilities/tests/test_changelog.py | 69 ++++++++++++++----- vulnerablecode/__init__.py | 2 - 4 files changed, 79 insertions(+), 22 deletions(-) create mode 100644 vulnerabilities/migrations/0082_alter_packagechangelog_software_version_and_more.py diff --git a/vulnerabilities/migrations/0082_alter_packagechangelog_software_version_and_more.py b/vulnerabilities/migrations/0082_alter_packagechangelog_software_version_and_more.py new file mode 100644 index 000000000..2314460d2 --- /dev/null +++ b/vulnerabilities/migrations/0082_alter_packagechangelog_software_version_and_more.py @@ -0,0 +1,27 @@ +# Generated by Django 4.2.16 on 2024-11-15 11:34 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", 
"0081_alter_packagechangelog_software_version_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="packagechangelog", + name="software_version", + field=models.CharField( + help_text="Version of the software at the time of change", max_length=100 + ), + ), + migrations.AlterField( + model_name="vulnerabilitychangelog", + name="software_version", + field=models.CharField( + help_text="Version of the software at the time of change", max_length=100 + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index e5fe231f6..d6fae2408 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1281,7 +1281,8 @@ class ChangeLog(models.Model): software_version = models.CharField( max_length=100, help_text="Version of the software at the time of change", - default=VULNERABLECODE_VERSION, + blank=False, + null=False, ) @property diff --git a/vulnerabilities/tests/test_changelog.py b/vulnerabilities/tests/test_changelog.py index b560d7338..1d5eedaea 100644 --- a/vulnerabilities/tests/test_changelog.py +++ b/vulnerabilities/tests/test_changelog.py @@ -7,22 +7,23 @@ # See https://aboutcode.org for more information about nexB OSS projects. 
# from datetime import datetime +from unittest.mock import patch import pytest +from packageurl import PackageURL from univers.version_range import NpmVersionRange from univers.versions import SemverVersion -from vulnerabilities.import_runner import ImportRunner +from vulnerabilities import models from vulnerabilities.importer import AffectedPackage -from vulnerabilities.models import * from vulnerabilities.pipelines.npm_importer import NpmImporterPipeline @pytest.mark.django_db def test_package_changelog(): - pkg, _ = Package.objects.get_or_create_from_purl("pkg:npm/foo@1.0.0") - assert PackageChangeLog.objects.filter(package=pkg).count() == 0 - adv = Advisory.objects.create( + pkg, _ = models.Package.objects.get_or_create_from_purl("pkg:npm/foo@1.0.0") + assert models.PackageChangeLog.objects.filter(package=pkg).count() == 0 + adv = models.Advisory.objects.create( created_by=NpmImporterPipeline.pipeline_id, summary="TEST", date_collected=datetime.now(), @@ -39,16 +40,18 @@ def test_package_changelog(): aliases=["CVE-123"], ) NpmImporterPipeline().import_advisory(advisory=adv) - assert PackageChangeLog.objects.filter(package=pkg).count() == 1 + assert models.PackageChangeLog.objects.filter(package=pkg).count() == 1 NpmImporterPipeline().import_advisory(advisory=adv) - assert PackageChangeLog.objects.filter(package=pkg).count() == 1 + assert models.PackageChangeLog.objects.filter(package=pkg).count() == 1 assert ( - PackageChangeLog.objects.filter(action_type=PackageChangeLog.FIXING, package=pkg).count() + models.PackageChangeLog.objects.filter( + action_type=models.PackageChangeLog.FIXING, package=pkg + ).count() == 1 ) - pkg1, _ = Package.objects.get_or_create_from_purl("pkg:npm/foo@2.0.0") - assert PackageChangeLog.objects.filter(package=pkg1).count() == 0 - adv = Advisory.objects.create( + pkg1, _ = models.Package.objects.get_or_create_from_purl("pkg:npm/foo@2.0.0") + assert models.PackageChangeLog.objects.filter(package=pkg1).count() == 0 + adv = 
models.Advisory.objects.create( created_by=NpmImporterPipeline.pipeline_id, summary="TEST-1", date_collected=datetime.now(), @@ -65,12 +68,13 @@ def test_package_changelog(): aliases=["CVE-145"], ) NpmImporterPipeline().import_advisory(advisory=adv) - assert PackageChangeLog.objects.filter(package=pkg1).count() == 1 + assert models.PackageChangeLog.objects.filter(package=pkg1).count() == 1 NpmImporterPipeline().import_advisory(advisory=adv) - assert PackageChangeLog.objects.filter(package=pkg1).count() == 1 + assert models.PackageChangeLog.objects.filter(package=pkg1).count() == 1 assert ( - PackageChangeLog.objects.filter( - action_type=PackageChangeLog.AFFECTED_BY, package=pkg1 + models.PackageChangeLog.objects.filter( + action_type=models.PackageChangeLog.AFFECTED_BY, + package=pkg1, ).count() == 1 ) @@ -78,7 +82,7 @@ def test_package_changelog(): @pytest.mark.django_db def test_vulnerability_changelog(): - adv = Advisory.objects.create( + adv = models.Advisory.objects.create( created_by=NpmImporterPipeline.pipeline_id, summary="TEST_1", date_collected=datetime.now(), @@ -97,10 +101,37 @@ def test_vulnerability_changelog(): NpmImporterPipeline().import_advisory(advisory=adv) # 1 Changelogs is expected here: # 1 for importing vuln details - assert VulnerabilityChangeLog.objects.count() == 1 + assert models.VulnerabilityChangeLog.objects.count() == 1 NpmImporterPipeline().import_advisory(advisory=adv) - assert VulnerabilityChangeLog.objects.count() == 1 + assert models.VulnerabilityChangeLog.objects.count() == 1 assert ( - VulnerabilityChangeLog.objects.filter(action_type=VulnerabilityChangeLog.IMPORT).count() + models.VulnerabilityChangeLog.objects.filter( + action_type=models.VulnerabilityChangeLog.IMPORT + ).count() == 1 ) + + +@patch("vulnerabilities.models.VULNERABLECODE_VERSION", "test-version") +@pytest.mark.django_db +def test_vulnerability_changelog_software_version(): + adv = models.Advisory.objects.create( + created_by=NpmImporterPipeline.pipeline_id, + 
summary="TEST_1", + date_collected=datetime.now(), + url="https://test.com/source", + affected_packages=[ + AffectedPackage( + package=PackageURL( + type="npm", + name="foo", + ), + fixed_version=SemverVersion("1.0"), + ).to_dict() + ], + aliases=["CVE-TEST-1234"], + ) + NpmImporterPipeline().import_advisory(advisory=adv) + npm_vulnerability_log = models.VulnerabilityChangeLog.objects.first() + + assert ("test-version", npm_vulnerability_log.software_version) diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index bfc9e4eea..10dd64cfd 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -9,8 +9,6 @@ import os import sys -import warnings -from pathlib import Path __version__ = "34.3.2" From 91112af3c0d35fada608cdd0bd82cedd09bb3f60 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 19 Nov 2024 10:58:13 +0530 Subject: [PATCH 013/545] Resolve migration conflict Signed-off-by: Keshav Priyadarshi --- ...=> 0083_alter_packagechangelog_software_version_and_more.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename vulnerabilities/migrations/{0082_alter_packagechangelog_software_version_and_more.py => 0083_alter_packagechangelog_software_version_and_more.py} (89%) diff --git a/vulnerabilities/migrations/0082_alter_packagechangelog_software_version_and_more.py b/vulnerabilities/migrations/0083_alter_packagechangelog_software_version_and_more.py similarity index 89% rename from vulnerabilities/migrations/0082_alter_packagechangelog_software_version_and_more.py rename to vulnerabilities/migrations/0083_alter_packagechangelog_software_version_and_more.py index 2314460d2..54c5a7b14 100644 --- a/vulnerabilities/migrations/0082_alter_packagechangelog_software_version_and_more.py +++ b/vulnerabilities/migrations/0083_alter_packagechangelog_software_version_and_more.py @@ -6,7 +6,7 @@ class Migration(migrations.Migration): dependencies = [ - ("vulnerabilities", "0081_alter_packagechangelog_software_version_and_more"), + 
("vulnerabilities", "0082_vulnerability_exploitability_and_more"), ] operations = [ From ff2b64c250772020e7a5f98e99be204b4d3b89b4 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 19 Nov 2024 13:00:41 +0530 Subject: [PATCH 014/545] Prepare v35.0.0rc1 Signed-off-by: Tushar Goel --- CHANGELOG.rst | 7 +++++++ setup.cfg | 2 +- vulnerablecode/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 306bf9649..40fb3d159 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,13 @@ Release notes ============= +Version v35.0.0rc1 +--------------------- + +- Add support for storing exploitability and weighted severity #1646 +- Avoid migrations on version bumps #1660 + + Version v34.3.2 ---------------- diff --git a/setup.cfg b/setup.cfg index a37c4fbec..749e0fc0c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = vulnerablecode -version = 34.3.2 +version = 35.0.0rc1 license = Apache-2.0 AND CC-BY-SA-4.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index 10dd64cfd..b05f0fa1f 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -10,7 +10,7 @@ import os import sys -__version__ = "34.3.2" +__version__ = "35.0.0rc1" def command_line(): From ca23847c72610c11aa5f3648c966aba3bfab7de8 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 20 Nov 2024 13:53:25 +0530 Subject: [PATCH 015/545] Add scores in bulk search V1 API Signed-off-by: Tushar Goel --- vulnerabilities/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/api.py b/vulnerabilities/api.py index 18c5f3cb2..3a99c9c38 100644 --- a/vulnerabilities/api.py +++ b/vulnerabilities/api.py @@ -177,7 +177,7 @@ def get_references(self, vulnerability): class Meta: model = Vulnerability - fields = ["url", "vulnerability_id", "summary", "references", "fixed_packages", 
"aliases"] + fields = ["url", "vulnerability_id", "summary", "references", "fixed_packages", "aliases", "risk_score", "exploitability", "weighted_severity"] class WeaknessSerializer(serializers.HyperlinkedModelSerializer): From dc19883e9130dcf54183fc861501d6413b96a4ac Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 20 Nov 2024 13:54:02 +0530 Subject: [PATCH 016/545] Fix formatting Signed-off-by: Tushar Goel --- vulnerabilities/api.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/api.py b/vulnerabilities/api.py index 3a99c9c38..285ccea79 100644 --- a/vulnerabilities/api.py +++ b/vulnerabilities/api.py @@ -177,7 +177,17 @@ def get_references(self, vulnerability): class Meta: model = Vulnerability - fields = ["url", "vulnerability_id", "summary", "references", "fixed_packages", "aliases", "risk_score", "exploitability", "weighted_severity"] + fields = [ + "url", + "vulnerability_id", + "summary", + "references", + "fixed_packages", + "aliases", + "risk_score", + "exploitability", + "weighted_severity", + ] class WeaknessSerializer(serializers.HyperlinkedModelSerializer): From a03d01b9ab608346fc367336f00aff854490fc6a Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 20 Nov 2024 14:01:43 +0530 Subject: [PATCH 017/545] Fix tests Signed-off-by: Tushar Goel --- vulnerabilities/tests/test_api.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/vulnerabilities/tests/test_api.py b/vulnerabilities/tests/test_api.py index 1ded8c824..06ae64fcb 100644 --- a/vulnerabilities/tests/test_api.py +++ b/vulnerabilities/tests/test_api.py @@ -636,6 +636,9 @@ def test_api_with_lesser_and_greater_fixed_by_packages(self): } ], "aliases": ["CVE-2020-36518", "GHSA-57j2-w4cx-62h2"], + "risk_score": None, + "exploitability": None, + "weighted_severity": None, "resource_url": "http://testserver/vulnerabilities/VCID-vul1-vul1-vul1", } ], @@ -678,6 +681,9 @@ def test_api_with_lesser_and_greater_fixed_by_packages(self): }, ], 
"aliases": ["CVE-2021-46877", "GHSA-3x8x-79m2-3w2w"], + "risk_score": None, + "exploitability": None, + "weighted_severity": None, "resource_url": "http://testserver/vulnerabilities/VCID-vul3-vul3-vul3", } ], From b45ae4562bfdcf90f632a2cb64cbb9dc9011e631 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 20 Nov 2024 15:43:20 +0530 Subject: [PATCH 018/545] Prepare for release v35.0.0 Signed-off-by: Tushar Goel --- CHANGELOG.rst | 59 ++++++++++++++++++++++++++++++++++++++ setup.cfg | 2 +- vulnerablecode/__init__.py | 2 +- 3 files changed, 61 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 40fb3d159..9ddaad005 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,65 @@ Release notes ============= + +Version v35.0.0 +--------------------- + +- Add scores in bulk search V1 API #1675 +- Add improver pipeline to flag ghost packages #644 #917 #1395 by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1533 +- Add base pipeline for importers and migrate PyPa importer to aboutcode pipeline by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1559 +- Remove dupe Package.get_non_vulnerable_versions by @pombredanne in https://github.com/aboutcode-org/vulnerablecode/pull/1570 +- Import data from GSD #706 by @ziadhany in https://github.com/aboutcode-org/vulnerablecode/pull/787 +- Add curl advisories importer by @ambuj-1211 in https://github.com/aboutcode-org/vulnerablecode/pull/1439 +- Update dependencies by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1590 +- Bump django from 4.2.0 to 4.2.15 by @dependabot in https://github.com/aboutcode-org/vulnerablecode/pull/1591 +- Bump cryptography from 42.0.4 to 43.0.1 by @dependabot in https://github.com/aboutcode-org/vulnerablecode/pull/1582 +- Bump actions/download-artifact from 3 to 4.1.7 in /.github/workflows by @dependabot in 
https://github.com/aboutcode-org/vulnerablecode/pull/1581 +- Improve export command by @pombredanne in https://github.com/aboutcode-org/vulnerablecode/pull/1571 +- Fix typo in Kev requests import by @ziadhany in https://github.com/aboutcode-org/vulnerablecode/pull/1594 +- Prepare for release v34.0.1 by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1595 +- Bump upload-artifact to v4 by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1596 +- Migrate Npm importer to aboutcode pipeline by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1574 +- Use correct regex for CVE by @pombredanne in https://github.com/aboutcode-org/vulnerablecode/pull/1599 +- Migrate Nginx importer to aboutcode pipeline by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1575 +- Migrate GitLab importer to aboutcode pipeline by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1580 +- Migrate GitHub importer to aboutcode pipeline by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1584 +- Migrate NVD importer to aboutcode pipeline by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1587 +- Match affected and fixed-by Packages by @johnmhoran in https://github.com/aboutcode-org/vulnerablecode/pull/1528 +- Add management command to commit exported data by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1600 +- Add support to Exploits model by @ziadhany in https://github.com/aboutcode-org/vulnerablecode/pull/1562 +- Fix 500 Server Error with DRF browsable API and resolve blank Swagger API documentation by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1603 +- Release v34.0.2 by @TG1999 in 
https://github.com/aboutcode-org/vulnerablecode/pull/1604 +- Bump VCIO version by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1605 +- Bump django from 4.2.15 to 4.2.16 by @dependabot in https://github.com/aboutcode-org/vulnerablecode/pull/1608 +- Bump fetchcode from v0.3.0 to v0.6.0 by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1607 +- Use 4-tier system for storing package metadata by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1609 +- Fix vers range crash by @pombredanne in https://github.com/aboutcode-org/vulnerablecode/pull/1598 +- Add GitHub action to publish aboutcode.hashid PyPI by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1615 +- Segregate PackageRelatedVulnerability model to new models by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1612 +- Add documentation for new pipeline design by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1621 +- Fix 500 error in /api/cpes endpoint by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1629 +- Migrate pysec importer to aboutcode pipeline by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1628 +- Avoid memory exhaustion during data migration by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1630 +- Add support for Calculating Risk in VulnerableCode by @ziadhany in https://github.com/aboutcode-org/vulnerablecode/pull/1593 +- Bulk create in migrations by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1640 +- Update README.rst by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1641 +- Prepare for release v34.1.0 by @TG1999 in 
https://github.com/aboutcode-org/vulnerablecode/pull/1642 +- Add V2 API endpoints by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1631 +- Prepare for release v34.2.0 by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1647 +- Refactor severity score model and fix incorrect suse scores by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1636 +- Add bulk search in v2 by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1649 +- Prepare release v34.3.0 by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1652 +- Add `on_failure` to handle cleanup during pipeline failure by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1651 +- Fix API bug by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1654 +- Add reference score to package endpoint by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1655 +- Prepare for release v34.3.2 by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1656 +- Add support for storing exploitability and weighted severity by @ziadhany in https://github.com/aboutcode-org/vulnerablecode/pull/1646 +- Avoid migrations on version bumps by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1660 +- Prepare v35.0.0rc1 by @TG1999 in https://github.com/aboutcode-org/vulnerablecode/pull/1664 + + + Version v35.0.0rc1 --------------------- diff --git a/setup.cfg b/setup.cfg index 749e0fc0c..c6fe7712a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = vulnerablecode -version = 35.0.0rc1 +version = 35.0.0 license = Apache-2.0 AND CC-BY-SA-4.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 diff --git a/vulnerablecode/__init__.py 
b/vulnerablecode/__init__.py index b05f0fa1f..deb04c6fa 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -10,7 +10,7 @@ import os import sys -__version__ = "35.0.0rc1" +__version__ = "35.0.0" def command_line(): From f270de2c955974b67a3730702725d80898ad2279 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 2 Dec 2024 20:21:21 +0530 Subject: [PATCH 019/545] Use AboutCode mirror for collecting CISA KEV Signed-off-by: Keshav Priyadarshi --- vulnerabilities/pipelines/enhance_with_kev.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/vulnerabilities/pipelines/enhance_with_kev.py b/vulnerabilities/pipelines/enhance_with_kev.py index d8244944a..e20f61653 100644 --- a/vulnerabilities/pipelines/enhance_with_kev.py +++ b/vulnerabilities/pipelines/enhance_with_kev.py @@ -35,9 +35,7 @@ def steps(cls): ) def fetch_exploits(self): - kev_url = ( - "https://www.cisa.gov/sites/default/files/feeds/known_exploited_vulnerabilities.json" - ) + kev_url = "https://raw.githubusercontent.com/aboutcode-org/aboutcode-mirror-kev/refs/heads/main/known_exploited_vulnerabilities.json" self.log(f"Fetching {kev_url}") try: From ae4abd86e8d9f8ecab22482e45790a64e30e4f47 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 20 Nov 2024 18:30:42 +0530 Subject: [PATCH 020/545] Do not report ghost packages as fix for vulnerabilities in UI Signed-off-by: Keshav Priyadarshi --- vulnerabilities/views.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index c51e77e93..4b7a9390d 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -119,7 +119,10 @@ def get_context_data(self, **kwargs): package = self.object context["package"] = package context["affected_by_vulnerabilities"] = package.affected_by.order_by("vulnerability_id") - context["fixing_vulnerabilities"] = package.fixing.order_by("vulnerability_id") + # Ghost package should not fix any 
vulnerability. + context["fixing_vulnerabilities"] = ( + None if package.is_ghost else package.fixing.order_by("vulnerability_id") + ) context["package_search_form"] = PackageSearchForm(self.request.GET) context["fixed_package_details"] = package.fixed_package_details @@ -193,6 +196,11 @@ def get_context_data(self, **kwargs): affected_fixed_by_matches["affected_package"] = sorted_affected_package matched_fixed_by_packages = [] for fixed_by_package in sorted_fixed_by_packages: + + # Ghost Package can't fix vulnerability. + if fixed_by_package.is_ghost: + continue + sorted_affected_version_class = get_purl_version_class(sorted_affected_package) fixed_by_version_class = get_purl_version_class(fixed_by_package) if ( From 7ff2a0f9c10b572beb0a9bc8e104e8ea8a2eb31b Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 20 Nov 2024 18:34:14 +0530 Subject: [PATCH 021/545] Prefetch severities and exploits in VulnerabilityDetails Signed-off-by: Keshav Priyadarshi --- vulnerabilities/views.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 4b7a9390d..fd57acea5 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -156,7 +156,17 @@ class VulnerabilityDetails(DetailView): slug_field = "vulnerability_id" def get_queryset(self): - return super().get_queryset().prefetch_related("references", "aliases", "weaknesses") + return ( + super() + .get_queryset() + .prefetch_related( + "references", + "aliases", + "weaknesses", + "severities", + "exploits", + ) + ) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) From 7fe3a7e0c1996cc7ee9806605a976e2207a591a5 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 20 Nov 2024 18:36:39 +0530 Subject: [PATCH 022/545] Do not report ghost packages as fix for vulnerabilities in APIv1 Signed-off-by: Keshav Priyadarshi --- vulnerabilities/api.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 
deletions(-) diff --git a/vulnerabilities/api.py b/vulnerabilities/api.py index 285ccea79..d9d4c8d63 100644 --- a/vulnerabilities/api.py +++ b/vulnerabilities/api.py @@ -22,9 +22,7 @@ from rest_framework import viewsets from rest_framework.decorators import action from rest_framework.response import Response -from rest_framework.reverse import reverse from rest_framework.throttling import AnonRateThrottle -from rest_framework.throttling import UserRateThrottle from vulnerabilities.models import Alias from vulnerabilities.models import Exploit @@ -369,6 +367,10 @@ def get_fixing_vulnerabilities(self, package) -> dict: """ Return a mapping of vulnerabilities fixed in the given `package`. """ + # Ghost package should not fix any vulnerability. + if package.is_ghost: + return + return self.get_vulnerabilities_for_a_package(package=package, fix=True) def get_affected_vulnerabilities(self, package) -> dict: @@ -643,7 +645,10 @@ def get_fixed_packages_qs(self): """ return ( self.get_packages_qs() - .filter(fixingpackagerelatedvulnerability__isnull=False) + .filter( + fixingpackagerelatedvulnerability__isnull=False, + is_ghost=False, + ) .with_is_vulnerable() ) From a5a58456446bb66dd07bd9c9e8a86fe907468a56 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 20 Nov 2024 18:39:43 +0530 Subject: [PATCH 023/545] Prefetch related in VulnerabilityViewSet Signed-off-by: Keshav Priyadarshi --- vulnerabilities/api.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/vulnerabilities/api.py b/vulnerabilities/api.py index d9d4c8d63..f1e7a03c4 100644 --- a/vulnerabilities/api.py +++ b/vulnerabilities/api.py @@ -674,6 +674,9 @@ def get_queryset(self): .get_queryset() .prefetch_related( "weaknesses", + "references", + "exploits", + "severities", Prefetch( "fixed_by_packages", queryset=self.get_fixed_packages_qs(), From d695d97d1c526dc2e5c8b46542bdff5c5ea2558c Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 22 Nov 2024 17:07:23 +0530 Subject: [PATCH 024/545] Do 
not report ghost packages as fix for vulnerabilities in APIv2 Signed-off-by: Keshav Priyadarshi --- vulnerabilities/api_v2.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/vulnerabilities/api_v2.py b/vulnerabilities/api_v2.py index 58771c916..2ab782d59 100644 --- a/vulnerabilities/api_v2.py +++ b/vulnerabilities/api_v2.py @@ -198,6 +198,9 @@ def get_affected_by_vulnerabilities(self, obj): return [vuln.vulnerability_id for vuln in obj.affected_by_vulnerabilities.all()] def get_fixing_vulnerabilities(self, obj): + # Ghost package should not fix any vulnerability. + if obj.is_ghost: + return [] return [vuln.vulnerability_id for vuln in obj.fixing_vulnerabilities.all()] From 6a517502ce691b2fa9132a1452307385140b0ed3 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 22 Nov 2024 17:09:11 +0530 Subject: [PATCH 025/545] Do not report ghost package as latest/next non-vulnerable version Signed-off-by: Keshav Priyadarshi --- vulnerabilities/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index d6fae2408..9cafe6d15 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -632,7 +632,7 @@ def only_vulnerable(self): return self._vulnerable(True) def only_non_vulnerable(self): - return self._vulnerable(False) + return self._vulnerable(False).filter(is_ghost=False) def _vulnerable(self, vulnerable=True): """ From 650df4ca29a3b8b986261f6d46f2772fcc51622a Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 22 Nov 2024 19:37:48 +0530 Subject: [PATCH 026/545] Test API with ghost packages Signed-off-by: Keshav Priyadarshi --- vulnerabilities/api.py | 2 +- vulnerabilities/tests/test_api.py | 221 +++++++++++++++++++++++++++++- 2 files changed, 220 insertions(+), 3 deletions(-) diff --git a/vulnerabilities/api.py b/vulnerabilities/api.py index f1e7a03c4..1fd480ce9 100644 --- a/vulnerabilities/api.py +++ b/vulnerabilities/api.py @@ -369,7 +369,7 @@ def 
get_fixing_vulnerabilities(self, package) -> dict: """ # Ghost package should not fix any vulnerability. if package.is_ghost: - return + return [] return self.get_vulnerabilities_for_a_package(package=package, fix=True) diff --git a/vulnerabilities/tests/test_api.py b/vulnerabilities/tests/test_api.py index 06ae64fcb..14a361ecf 100644 --- a/vulnerabilities/tests/test_api.py +++ b/vulnerabilities/tests/test_api.py @@ -9,7 +9,6 @@ import json import os -from collections import OrderedDict from urllib.parse import quote from django.test import TestCase @@ -31,7 +30,6 @@ from vulnerabilities.models import VulnerabilitySeverity from vulnerabilities.models import Weakness from vulnerabilities.severity_systems import EPSS -from vulnerabilities.tests import util_tests BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data") @@ -355,6 +353,55 @@ def test_api_with_single_vulnerability_with_filters(self): "weighted_severity": None, } + def test_api_with_single_vulnerability_no_ghost_fix(self): + self.pkg2.is_ghost = True + self.pkg1.is_ghost = True + self.pkg2.save() + self.pkg1.save() + + response = self.csrf_client.get( + f"/api/vulnerabilities/{self.vulnerability.id}", format="json" + ).data + + expected = { + "url": f"http://testserver/api/vulnerabilities/{self.vulnerability.id}", + "vulnerability_id": self.vulnerability.vulnerability_id, + "summary": "test", + "severity_range_score": None, + "aliases": [], + "resource_url": f"http://testserver/vulnerabilities/{self.vulnerability.vulnerability_id}", + "fixed_packages": [], + "affected_packages": [], + "references": [ + { + "reference_url": "https://.com", + "reference_id": "", + "reference_type": "", + "scores": [ + { + "value": "0.526", + "scoring_system": "epss", + "scoring_elements": ".0016", + } + ], + "url": "https://.com", + } + ], + "weaknesses": [ + { + "cwe_id": 119, + "name": "Improper Restriction of Operations within the Bounds of a Memory Buffer", + "description": 
"The product performs operations on a memory buffer, but it can read from or write to a memory location that is outside of the intended boundary of the buffer.", + }, + ], + "exploits": [], + "risk_score": None, + "exploitability": None, + "weighted_severity": None, + } + + assert expected == response + def set_as_affected_by(package, vulnerability): """ @@ -743,6 +790,176 @@ def test_api_with_ignorning_qualifiers(self): == "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.14.0-rc1" ) + def test_api_with_ghost_package_no_fixing_vulnerabilities(self): + self.pkg_2_13_1.is_ghost = True + self.pkg_2_13_1.save() + + response = self.csrf_client.get(f"/api/packages/{self.pkg_2_13_1.id}", format="json").data + + expected = { + "url": "http://testserver/api/packages/{0}".format(self.pkg_2_13_1.id), + "purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.1", + "type": "maven", + "namespace": "com.fasterxml.jackson.core", + "name": "jackson-databind", + "version": "2.13.1", + "qualifiers": {}, + "subpath": "", + "is_vulnerable": True, + "next_non_vulnerable_version": "2.14.0-rc1", + "latest_non_vulnerable_version": "2.14.0-rc1", + "affected_by_vulnerabilities": [ + { + "url": "http://testserver/api/vulnerabilities/{0}".format(self.vul1.id), + "vulnerability_id": "VCID-vul1-vul1-vul1", + "summary": "This is VCID-vul1-vul1-vul1", + "references": [ + { + "reference_url": "https://example.com", + "reference_id": "CVE-xxx-xxx", + "reference_type": "advisory", + "scores": [ + { + "value": "0.526", + "scoring_system": "epss", + "scoring_elements": ".0016", + } + ], + "url": "https://example.com", + } + ], + "fixed_packages": [ + { + "url": "http://testserver/api/packages/{0}".format(self.pkg_2_13_2.id), + "purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.2", + "is_vulnerable": True, + "affected_by_vulnerabilities": [ + {"vulnerability": "VCID-vul2-vul2-vul2"} + ], + "resource_url": 
"http://testserver/packages/pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.2", + } + ], + "aliases": ["CVE-2020-36518", "GHSA-57j2-w4cx-62h2"], + "risk_score": None, + "exploitability": None, + "weighted_severity": None, + "resource_url": "http://testserver/vulnerabilities/VCID-vul1-vul1-vul1", + } + ], + "fixing_vulnerabilities": [], + "risk_score": None, + "resource_url": "http://testserver/packages/pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.1", + } + + assert response == expected + + def test_api_with_ghost_package_no_next_latest_non_vulnerabilities(self): + self.pkg_2_14_0_rc1.is_ghost = True + self.pkg_2_14_0_rc1.save() + + response = self.csrf_client.get(f"/api/packages/{self.pkg_2_13_1.id}", format="json").data + + expected = { + "url": "http://testserver/api/packages/{0}".format(self.pkg_2_13_1.id), + "purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.1", + "type": "maven", + "namespace": "com.fasterxml.jackson.core", + "name": "jackson-databind", + "version": "2.13.1", + "qualifiers": {}, + "subpath": "", + "is_vulnerable": True, + "next_non_vulnerable_version": None, + "latest_non_vulnerable_version": None, + "affected_by_vulnerabilities": [ + { + "url": "http://testserver/api/vulnerabilities/{0}".format(self.vul1.id), + "vulnerability_id": "VCID-vul1-vul1-vul1", + "summary": "This is VCID-vul1-vul1-vul1", + "references": [ + { + "reference_url": "https://example.com", + "reference_id": "CVE-xxx-xxx", + "reference_type": "advisory", + "scores": [ + { + "value": "0.526", + "scoring_system": "epss", + "scoring_elements": ".0016", + } + ], + "url": "https://example.com", + } + ], + "fixed_packages": [ + { + "url": "http://testserver/api/packages/{0}".format(self.pkg_2_13_2.id), + "purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.2", + "is_vulnerable": True, + "affected_by_vulnerabilities": [ + {"vulnerability": "VCID-vul2-vul2-vul2"} + ], + "resource_url": 
"http://testserver/packages/pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.2", + } + ], + "aliases": ["CVE-2020-36518", "GHSA-57j2-w4cx-62h2"], + "risk_score": None, + "exploitability": None, + "weighted_severity": None, + "resource_url": "http://testserver/vulnerabilities/VCID-vul1-vul1-vul1", + } + ], + "fixing_vulnerabilities": [ + { + "url": "http://testserver/api/vulnerabilities/{0}".format(self.vul3.id), + "vulnerability_id": "VCID-vul3-vul3-vul3", + "summary": "This is VCID-vul3-vul3-vul3", + "references": [ + { + "reference_url": "https://example.com", + "reference_id": "CVE-xxx-xxx", + "reference_type": "advisory", + "scores": [ + { + "value": "0.526", + "scoring_system": "epss", + "scoring_elements": ".0016", + } + ], + "url": "https://example.com", + } + ], + "fixed_packages": [ + { + "url": "http://testserver/api/packages/{0}".format(self.pkg_2_12_6.id), + "purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.12.6", + "is_vulnerable": False, + "affected_by_vulnerabilities": [], + "resource_url": "http://testserver/packages/pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.12.6", + }, + { + "url": "http://testserver/api/packages/{0}".format(self.pkg_2_13_1.id), + "purl": "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.1", + "is_vulnerable": True, + "affected_by_vulnerabilities": [ + {"vulnerability": "VCID-vul1-vul1-vul1"} + ], + "resource_url": "http://testserver/packages/pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.1", + }, + ], + "aliases": ["CVE-2021-46877", "GHSA-3x8x-79m2-3w2w"], + "risk_score": None, + "exploitability": None, + "weighted_severity": None, + "resource_url": "http://testserver/vulnerabilities/VCID-vul3-vul3-vul3", + } + ], + "risk_score": None, + "resource_url": "http://testserver/packages/pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.1", + } + + assert response == expected + class CPEApi(TestCase): def setUp(self): From b15ff1651986806a50189e14256805975999156d Mon 
Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 6 Dec 2024 18:14:39 +0530 Subject: [PATCH 027/545] Prepare aboutcode.hashid v0.2.0 release Signed-off-by: Keshav Priyadarshi --- pyproject-aboutcode.hashid.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject-aboutcode.hashid.toml b/pyproject-aboutcode.hashid.toml index 84c494ad8..a2c304323 100644 --- a/pyproject-aboutcode.hashid.toml +++ b/pyproject-aboutcode.hashid.toml @@ -4,7 +4,7 @@ build-backend = "flot.buildapi" [project] name = "aboutcode.hashid" -version = "0.1.0" +version = "0.2.0" description = "A library for aboutcode hash-based identifiers for VCID, and PURLs" readme = "aboutcode/hashid/README.rst" license = { text = "Apache-2.0 AND Python-2.0" } From 41f6475b252f05c1e74296493491c1fcee498948 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 6 Dec 2024 21:38:16 +0530 Subject: [PATCH 028/545] Add CHANGELOG for aboutcode.hashid Signed-off-by: Keshav Priyadarshi --- aboutcode/hashid/CHANGELOG.rst | 13 +++++++++++++ aboutcode/hashid/__init__.py | 3 +++ 2 files changed, 16 insertions(+) create mode 100644 aboutcode/hashid/CHANGELOG.rst diff --git a/aboutcode/hashid/CHANGELOG.rst b/aboutcode/hashid/CHANGELOG.rst new file mode 100644 index 000000000..2d1f39adf --- /dev/null +++ b/aboutcode/hashid/CHANGELOG.rst @@ -0,0 +1,13 @@ +Changelog +============= + + +v0.2.0 (December 05, 2024) +--------------------------- + +- Use 4-tier system for storing package metadata https://github.com/aboutcode-org/vulnerablecode/pull/1609 + +v0.1.0 (September 12, 2024) +--------------------------- + +- Initial release of the ``aboutcode.hashid`` library. 
\ No newline at end of file diff --git a/aboutcode/hashid/__init__.py b/aboutcode/hashid/__init__.py index 5ccba8e04..0d80ffafd 100644 --- a/aboutcode/hashid/__init__.py +++ b/aboutcode/hashid/__init__.py @@ -19,6 +19,9 @@ from packageurl import normalize_qualifiers from packageurl import normalize_subpath +__version__ = "0.2.0" + + """ General purpose utilities to create Vulnerability Ids aka. VCID and content-defined, hash-based paths to store Vulnerability and Package data using these paths in many balanced directories. From c9233647e7aab3d50e28d02c7bd7fc3d0babe294 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Sun, 8 Dec 2024 14:49:26 +0530 Subject: [PATCH 029/545] Add pipeline to sort packages (#1686) * Add pipeline to sort packages Signed-off-by: Tushar Goel * Add tests Signed-off-by: Tushar Goel * Add calculate_version_rank on Package Signed-off-by: Tushar Goel * Start enumerating from 1 Signed-off-by: Tushar Goel * Fix tests Signed-off-by: Tushar Goel * Return version rank anyhow Signed-off-by: Tushar Goel * Fix API tests Signed-off-by: Tushar Goel * Address review comments Signed-off-by: Tushar Goel --------- Signed-off-by: Tushar Goel --- vulnerabilities/improvers/__init__.py | 2 + ...er_package_options_package_version_rank.py | 35 +++++++ vulnerabilities/models.py | 62 +++++++++---- .../pipelines/compute_package_version_rank.py | 93 +++++++++++++++++++ vulnerabilities/tests/test_api.py | 4 +- .../test_compute_package_version_rank.py | 59 ++++++++++++ vulnerabilities/tests/test_models.py | 7 +- 7 files changed, 241 insertions(+), 21 deletions(-) create mode 100644 vulnerabilities/migrations/0084_alter_package_options_package_version_rank.py create mode 100644 vulnerabilities/pipelines/compute_package_version_rank.py create mode 100644 vulnerabilities/tests/test_compute_package_version_rank.py diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index 
fd18fb28c..dd73eb02d 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -11,6 +11,7 @@ from vulnerabilities.improvers import vulnerability_status from vulnerabilities.pipelines import VulnerableCodePipeline from vulnerabilities.pipelines import compute_package_risk +from vulnerabilities.pipelines import compute_package_version_rank from vulnerabilities.pipelines import enhance_with_exploitdb from vulnerabilities.pipelines import enhance_with_kev from vulnerabilities.pipelines import enhance_with_metasploit @@ -39,6 +40,7 @@ enhance_with_metasploit.MetasploitImproverPipeline, enhance_with_exploitdb.ExploitDBImproverPipeline, compute_package_risk.ComputePackageRiskPipeline, + compute_package_version_rank.ComputeVersionRankPipeline, ] IMPROVERS_REGISTRY = { diff --git a/vulnerabilities/migrations/0084_alter_package_options_package_version_rank.py b/vulnerabilities/migrations/0084_alter_package_options_package_version_rank.py new file mode 100644 index 000000000..6b33c1a59 --- /dev/null +++ b/vulnerabilities/migrations/0084_alter_package_options_package_version_rank.py @@ -0,0 +1,35 @@ +# Generated by Django 4.2.16 on 2024-12-04 11:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0083_alter_packagechangelog_software_version_and_more"), + ] + + operations = [ + migrations.AlterModelOptions( + name="package", + options={ + "ordering": [ + "type", + "namespace", + "name", + "version_rank", + "version", + "qualifiers", + "subpath", + ] + }, + ), + migrations.AddField( + model_name="package", + name="version_rank", + field=models.IntegerField( + default=0, + help_text="Rank of the version to support ordering by version. 
Rank zero means the rank has not been defined yet", + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 9cafe6d15..c2e89022f 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -705,6 +705,12 @@ class Package(PackageURLMixin): "indicate greater vulnerability risk for the package.", ) + version_rank = models.IntegerField( + help_text="Rank of the version to support ordering by version. Rank " + "zero means the rank has not been defined yet", + default=0, + ) + objects = PackageQuerySet.as_manager() def save(self, *args, **kwargs): @@ -738,11 +744,34 @@ def purl(self): class Meta: unique_together = ["type", "namespace", "name", "version", "qualifiers", "subpath"] - ordering = ["type", "namespace", "name", "version", "qualifiers", "subpath"] + ordering = ["type", "namespace", "name", "version_rank", "version", "qualifiers", "subpath"] def __str__(self): return self.package_url + @property + def calculate_version_rank(self): + """ + Calculate and return the `version_rank` for a package that does not have one. + If this package already has a `version_rank`, return it. + + The calculated rank will be interpolated between two packages that have + `version_rank` values and are closest to this package in terms of version order. 
+ """ + + group_packages = Package.objects.filter( + type=self.type, + namespace=self.namespace, + name=self.name, + ) + + if any(p.version_rank == 0 for p in group_packages): + sorted_packages = sorted(group_packages, key=lambda p: self.version_class(p.version)) + for rank, package in enumerate(sorted_packages, start=1): + package.version_rank = rank + Package.objects.bulk_update(sorted_packages, fields=["version_rank"]) + return self.version_rank + @property def affected_by(self): """ @@ -789,14 +818,6 @@ def get_details_url(self, request): return reverse("package_details", kwargs={"purl": self.purl}, request=request) - def sort_by_version(self, packages): - """ - Return a sequence of `packages` sorted by version. - """ - if not packages: - return [] - return sorted(packages, key=lambda x: self.version_class(x.version)) - @cached_property def version_class(self): range_class = RANGE_CLASS_BY_SCHEMES.get(self.type) @@ -831,19 +852,20 @@ def get_non_vulnerable_versions(self): Return a tuple of the next and latest non-vulnerable versions as Package instance. Return a tuple of (None, None) if there is no non-vulnerable version. 
""" + if self.version_rank == 0: + self.calculate_version_rank non_vulnerable_versions = Package.objects.get_fixed_by_package_versions( self, fix=False ).only_non_vulnerable() - sorted_versions = self.sort_by_version(non_vulnerable_versions) - later_non_vulnerable_versions = [ - non_vuln_ver - for non_vuln_ver in sorted_versions - if self.version_class(non_vuln_ver.version) > self.current_version - ] + later_non_vulnerable_versions = non_vulnerable_versions.filter( + version_rank__gt=self.version_rank + ) + + later_non_vulnerable_versions = list(later_non_vulnerable_versions) if later_non_vulnerable_versions: - sorted_versions = self.sort_by_version(later_non_vulnerable_versions) + sorted_versions = later_non_vulnerable_versions next_non_vulnerable = sorted_versions[0] latest_non_vulnerable = sorted_versions[-1] return next_non_vulnerable, latest_non_vulnerable @@ -872,6 +894,8 @@ def get_affecting_vulnerabilities(self): Return a list of vulnerabilities that affect this package together with information regarding the versions that fix the vulnerabilities. """ + if self.version_rank == 0: + self.calculate_version_rank package_details_vulns = [] fixed_by_packages = Package.objects.get_fixed_by_package_versions(self, fix=True) @@ -895,12 +919,13 @@ def get_affecting_vulnerabilities(self): if fixed_version > self.current_version: later_fixed_packages.append(fixed_pkg) - next_fixed_package = None next_fixed_package_vulns = [] sort_fixed_by_packages_by_version = [] if later_fixed_packages: - sort_fixed_by_packages_by_version = self.sort_by_version(later_fixed_packages) + sort_fixed_by_packages_by_version = sorted( + later_fixed_packages, key=lambda p: p.version_rank + ) fixed_by_pkgs = [] @@ -930,6 +955,7 @@ def fixing_vulnerabilities(self): """ Return a queryset of Vulnerabilities that are fixed by this package. 
""" + print("A") return self.fixed_by_vulnerabilities.all() @property diff --git a/vulnerabilities/pipelines/compute_package_version_rank.py b/vulnerabilities/pipelines/compute_package_version_rank.py new file mode 100644 index 000000000..73d4aa60a --- /dev/null +++ b/vulnerabilities/pipelines/compute_package_version_rank.py @@ -0,0 +1,93 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +from itertools import groupby + +from aboutcode.pipeline import LoopProgress +from django.db import transaction +from univers.version_range import RANGE_CLASS_BY_SCHEMES +from univers.versions import Version + +from vulnerabilities.models import Package +from vulnerabilities.pipelines import VulnerableCodePipeline + + +class ComputeVersionRankPipeline(VulnerableCodePipeline): + """ + A pipeline to compute and assign version ranks for all packages. + """ + + pipeline_id = "compute_version_rank" + license_expression = None + + @classmethod + def steps(cls): + return (cls.compute_and_store_version_rank,) + + def compute_and_store_version_rank(self): + """ + Compute and assign version ranks to all packages. 
+ """ + groups = Package.objects.only("type", "namespace", "name").order_by( + "type", "namespace", "name" + ) + + def key(package): + return package.type, package.namespace, package.name + + groups = groupby(groups, key=key) + + groups = [(list(x), list(y)) for x, y in groups] + + total_groups = len(groups) + self.log(f"Calculating `version_rank` for {total_groups:,d} groups of packages.") + + progress = LoopProgress( + total_iterations=total_groups, + logger=self.log, + progress_step=5, + ) + + for group, packages in progress.iter(groups): + type, namespace, name = group + if type not in RANGE_CLASS_BY_SCHEMES: + continue + self.update_version_rank_for_group(packages) + + self.log("Successfully populated `version_rank` for all packages.") + + @transaction.atomic + def update_version_rank_for_group(self, packages): + """ + Update the `version_rank` for all packages in a specific group. + """ + + # Sort the packages by version + sorted_packages = self.sort_packages_by_version(packages) + + # Assign version ranks + updates = [] + for rank, package in enumerate(sorted_packages, start=1): + package.version_rank = rank + updates.append(package) + + # Bulk update to save the ranks + Package.objects.bulk_update(updates, fields=["version_rank"]) + + def sort_packages_by_version(self, packages): + """ + Sort packages by version using `version_class`. 
+ """ + + if not packages: + return [] + version_class = RANGE_CLASS_BY_SCHEMES.get(packages[0].type).version_class + if not version_class: + version_class = Version + return sorted(packages, key=lambda p: version_class(p.version)) diff --git a/vulnerabilities/tests/test_api.py b/vulnerabilities/tests/test_api.py index 14a361ecf..a5f80aa06 100644 --- a/vulnerabilities/tests/test_api.py +++ b/vulnerabilities/tests/test_api.py @@ -489,6 +489,7 @@ def setUp(self): self.pkg_2_14_0_rc1 = from_purl( "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.14.0-rc1" ) + self.pkg_2_12_6.calculate_version_rank set_as_fixing(package=self.pkg_2_12_6, vulnerability=self.vul3) @@ -608,6 +609,7 @@ def setUp(self): self.pkg_2_14_0_rc1 = from_purl( "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.14.0-rc1" ) + self.pkg_2_12_6.calculate_version_rank self.ref = VulnerabilityReference.objects.create( reference_type="advisory", reference_id="CVE-xxx-xxx", url="https://example.com" @@ -806,7 +808,7 @@ def test_api_with_ghost_package_no_fixing_vulnerabilities(self): "qualifiers": {}, "subpath": "", "is_vulnerable": True, - "next_non_vulnerable_version": "2.14.0-rc1", + "next_non_vulnerable_version": "2.12.6", "latest_non_vulnerable_version": "2.14.0-rc1", "affected_by_vulnerabilities": [ { diff --git a/vulnerabilities/tests/test_compute_package_version_rank.py b/vulnerabilities/tests/test_compute_package_version_rank.py new file mode 100644 index 000000000..12cd172a8 --- /dev/null +++ b/vulnerabilities/tests/test_compute_package_version_rank.py @@ -0,0 +1,59 @@ +from unittest.mock import patch + +import pytest +from univers.versions import Version + +from vulnerabilities.models import Package +from vulnerabilities.pipelines.compute_package_version_rank import ComputeVersionRankPipeline + + +@pytest.mark.django_db +class TestComputeVersionRankPipeline: + @pytest.fixture + def pipeline(self): + return ComputeVersionRankPipeline() + + @pytest.fixture + def packages(self, db): + 
package_type = "pypi" + namespace = "test_namespace" + name = "test_package" + Package.objects.create(type=package_type, namespace=namespace, name=name, version="1.0.0") + Package.objects.create(type=package_type, namespace=namespace, name=name, version="1.1.0") + Package.objects.create(type=package_type, namespace=namespace, name=name, version="0.9.0") + return Package.objects.filter(type=package_type, namespace=namespace, name=name) + + def test_compute_and_store_version_rank(self, pipeline, packages): + with patch.object(pipeline, "log") as mock_log: + pipeline.compute_and_store_version_rank() + assert mock_log.call_count > 0 + for package in packages: + assert package.version_rank is not None + + def test_update_version_rank_for_group(self, pipeline, packages): + with patch.object(Package.objects, "bulk_update") as mock_bulk_update: + pipeline.update_version_rank_for_group(packages) + mock_bulk_update.assert_called_once() + updated_packages = mock_bulk_update.call_args[0][0] + assert len(updated_packages) == len(packages) + for idx, package in enumerate(sorted(packages, key=lambda p: Version(p.version))): + assert updated_packages[idx].version_rank == idx + + def test_sort_packages_by_version(self, pipeline, packages): + sorted_packages = pipeline.sort_packages_by_version(packages) + versions = [p.version for p in sorted_packages] + assert versions == sorted(versions, key=Version) + + def test_sort_packages_by_version_empty(self, pipeline): + assert pipeline.sort_packages_by_version([]) == [] + + def test_sort_packages_by_version_invalid_scheme(self, pipeline, packages): + for package in packages: + package.type = "invalid" + assert pipeline.sort_packages_by_version(packages) == [] + + def test_compute_and_store_version_rank_invalid_scheme(self, pipeline): + Package.objects.create(type="invalid", namespace="test", name="package", version="1.0.0") + with patch.object(pipeline, "log") as mock_log: + pipeline.compute_and_store_version_rank() + 
mock_log.assert_any_call("Successfully populated `version_rank` for all packages.") diff --git a/vulnerabilities/tests/test_models.py b/vulnerabilities/tests/test_models.py index 78da37b9d..014754786 100644 --- a/vulnerabilities/tests/test_models.py +++ b/vulnerabilities/tests/test_models.py @@ -423,8 +423,11 @@ def test_sort_by_version(self): version="3.0.0", ) - sorted_pkgs = requesting_package.sort_by_version(vuln_pkg_list) - first_sorted_item = sorted_pkgs[0] + requesting_package.calculate_version_rank + + sorted_pkgs = Package.objects.filter(package_url__in=list_to_sort) + + sorted_pkgs = list(sorted_pkgs) assert sorted_pkgs[0].purl == "pkg:npm/sequelize@3.9.1" assert sorted_pkgs[-1].purl == "pkg:npm/sequelize@3.40.1" From ae220d1acfe9b9bc3fa2c995229e7338fa3f8152 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Sun, 8 Dec 2024 14:55:41 +0530 Subject: [PATCH 030/545] Fix urls for API (#1678) Signed-off-by: Tushar Goel --- vulnerablecode/urls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerablecode/urls.py b/vulnerablecode/urls.py index a4da0a7c0..10f7db13f 100644 --- a/vulnerablecode/urls.py +++ b/vulnerablecode/urls.py @@ -50,6 +50,7 @@ def __init__(self, *args, **kwargs): api_v2_router.register("vulnerabilities", VulnerabilityV2ViewSet, basename="vulnerability-v2") urlpatterns = [ + path("api/v2/", include(api_v2_router.urls)), path( "robots.txt", TemplateView.as_view(template_name="robots.txt", content_type="text/plain"), @@ -104,7 +105,6 @@ def __init__(self, *args, **kwargs): TemplateView.as_view(template_name="tos.html"), name="api_tos", ), - path("api/v2/", include(api_v2_router.urls)), path( "admin/", admin.site.urls, From b47f382eab5a0d9da10d4b9d3b8e7efda97463f8 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Sun, 8 Dec 2024 15:00:33 +0530 Subject: [PATCH 031/545] Preapre for release version v35.1.0 (#1689) 
Signed-off-by: Tushar Goel --- CHANGELOG.rst | 9 +++++++++ setup.cfg | 2 +- vulnerablecode/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9ddaad005..7f6debf44 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,6 +2,15 @@ Release notes ============= +Version v35.1.0 +--------------------- + +- Use AboutCode mirror for collecting CISA KEV #1685 +- Do not report ghost package as a fix for vulnerability #1679 +- Add pipeline to sort packages #1686 +- Fix urls for API #1678 + + Version v35.0.0 --------------------- diff --git a/setup.cfg b/setup.cfg index c6fe7712a..a3db96abd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = vulnerablecode -version = 35.0.0 +version = 35.1.0 license = Apache-2.0 AND CC-BY-SA-4.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index deb04c6fa..ee339e883 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -10,7 +10,7 @@ import os import sys -__version__ = "35.0.0" +__version__ = "35.1.0" def command_line(): From 26b1de9ccb85626967b26ca87b281bbcad558b79 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 8 Dec 2024 09:31:22 +0000 Subject: [PATCH 032/545] Bump django from 4.2.16 to 4.2.17 Bumps [django](https://github.com/django/django) from 4.2.16 to 4.2.17. - [Commits](https://github.com/django/django/compare/4.2.16...4.2.17) --- updated-dependencies: - dependency-name: django dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 84ea22538..e05816191 100644 --- a/requirements.txt +++ b/requirements.txt @@ -27,7 +27,7 @@ dateparser==1.1.1 decorator==5.1.1 defusedxml==0.7.1 distro==1.7.0 -Django==4.2.16 +Django==4.2.17 django-crispy-forms==2.3 django-environ==0.11.2 django-filter==24.3 From 7a3d91e8b4cd373cd93a97d137833cc27ca7980e Mon Sep 17 00:00:00 2001 From: ambuj Date: Thu, 12 Dec 2024 23:45:33 +0530 Subject: [PATCH 033/545] Add redundant code to utils Signed-off-by: ambuj --- vulnerabilities/importers/apache_httpd.py | 28 +++++++---------------- vulnerabilities/importers/debian.py | 13 ++--------- vulnerabilities/importers/fireeye.py | 20 ++++------------ vulnerabilities/tests/test_fireeye.py | 1 - vulnerabilities/utils.py | 26 +++++++++++++++++++++ 5 files changed, 40 insertions(+), 48 deletions(-) diff --git a/vulnerabilities/importers/apache_httpd.py b/vulnerabilities/importers/apache_httpd.py index b6501daad..73abd288a 100644 --- a/vulnerabilities/importers/apache_httpd.py +++ b/vulnerabilities/importers/apache_httpd.py @@ -13,7 +13,6 @@ import requests from bs4 import BeautifulSoup -from cwe2.database import Database from packageurl import PackageURL from univers.version_constraint import VersionConstraint from univers.version_range import ApacheVersionRange @@ -25,7 +24,8 @@ from vulnerabilities.importer import Reference from vulnerabilities.importer import VulnerabilitySeverity from vulnerabilities.severity_systems import APACHE_HTTPD -from vulnerabilities.utils import get_cwe_id +from vulnerabilities.utils import create_weaknesses_list +from vulnerabilities.utils import cwe_regex from vulnerabilities.utils import get_item logger = logging.getLogger(__name__) @@ -234,33 +234,21 @@ def get_weaknesses(cve_data): >>> get_weaknesses(mock_cve_data2) [190, 200] """ - alias = get_item(cve_data, "CVE_data_meta", "ID") - cwe_id = [] 
- db = Database() + cwe_strings = [] if alias: problemtype_data = get_item(cve_data, "problemtype", "problemtype_data") or [] for problem in problemtype_data: - for desc in problem["description"]: + for desc in problem.get("description", []): value = desc.get("value", "") - cwe_pattern = r"CWE-\d+" - cwe_id_string_list = re.findall(cwe_pattern, value) - for cwe_id_string in cwe_id_string_list: - cwe_id.append(get_cwe_id(cwe_id_string)) - + cwe_id_string_list = re.findall(cwe_regex, value) + cwe_strings.extend(cwe_id_string_list) else: problemTypes = cve_data.get("containers", {}).get("cna", {}).get("problemTypes", []) descriptions = problemTypes[0].get("descriptions", []) if len(problemTypes) > 0 else [] for description in descriptions: cwe_id_string = description.get("cweId", "") - cwe_id.append(get_cwe_id(cwe_id_string)) - - weaknesses = [] - for cwe in cwe_id: - try: - db.get(cwe) - weaknesses.append(cwe) - except Exception: - logger.error("Invalid CWE id") + cwe_strings.append(cwe_id_string) + weaknesses = create_weaknesses_list(cwe_strings) return weaknesses diff --git a/vulnerabilities/importers/debian.py b/vulnerabilities/importers/debian.py index ccce49634..3953443ad 100644 --- a/vulnerabilities/importers/debian.py +++ b/vulnerabilities/importers/debian.py @@ -24,8 +24,8 @@ from vulnerabilities.importer import AffectedPackage from vulnerabilities.importer import Importer from vulnerabilities.importer import Reference +from vulnerabilities.utils import create_weaknesses_list from vulnerabilities.utils import dedupe -from vulnerabilities.utils import get_cwe_id from vulnerabilities.utils import get_item logger = logging.getLogger(__name__) @@ -178,14 +178,5 @@ def get_cwe_from_debian_advisory(record): description = record.get("description") or "" pattern = r"CWE-\d+" cwe_strings = re.findall(pattern, description) - weaknesses = [] - db = Database() - for cwe_string in cwe_strings: - if cwe_string: - cwe_id = get_cwe_id(cwe_string) - try: - db.get(cwe_id) - 
weaknesses.append(cwe_id) - except Exception: - logger.error("Invalid CWE id") + weaknesses = create_weaknesses_list(cwe_strings) return weaknesses diff --git a/vulnerabilities/importers/fireeye.py b/vulnerabilities/importers/fireeye.py index 69ce84176..c26cabcf6 100644 --- a/vulnerabilities/importers/fireeye.py +++ b/vulnerabilities/importers/fireeye.py @@ -12,14 +12,13 @@ from typing import Iterable from typing import List -from cwe2.database import Database - from vulnerabilities.importer import AdvisoryData from vulnerabilities.importer import Importer from vulnerabilities.importer import Reference from vulnerabilities.utils import build_description +from vulnerabilities.utils import create_weaknesses_list +from vulnerabilities.utils import cwe_regex from vulnerabilities.utils import dedupe -from vulnerabilities.utils import get_cwe_id logger = logging.getLogger(__name__) @@ -160,19 +159,8 @@ def get_weaknesses(cwe_data): """ cwe_list = [] for line in cwe_data: - cwe_ids = re.findall(r"CWE-\d+", line) + cwe_ids = re.findall(cwe_regex, line) cwe_list.extend(cwe_ids) - weaknesses = [] - db = Database() - - for cwe_string in cwe_list: - - if cwe_string: - cwe_id = get_cwe_id(cwe_string) - try: - db.get(cwe_id) - weaknesses.append(cwe_id) - except Exception: - logger.error("Invalid CWE id") + weaknesses = create_weaknesses_list(cwe_list) return weaknesses diff --git a/vulnerabilities/tests/test_fireeye.py b/vulnerabilities/tests/test_fireeye.py index 94da186d9..3f03bbb85 100644 --- a/vulnerabilities/tests/test_fireeye.py +++ b/vulnerabilities/tests/test_fireeye.py @@ -226,7 +226,6 @@ def test_get_weaknesses(self): "CWE-362: Concurrent Execution using Shared Resource with Improper Synchronization ('Race Condition')", ] ) == [379, 362] - assert ( get_weaknesses( [ diff --git a/vulnerabilities/utils.py b/vulnerabilities/utils.py index cb16f0eb6..cf39f52a0 100644 --- a/vulnerabilities/utils.py +++ b/vulnerabilities/utils.py @@ -29,6 +29,8 @@ import saneyaml import toml 
import urllib3 +from cwe2.database import Database +from cwe2.database import InvalidCWEError from packageurl import PackageURL from packageurl.contrib.django.utils import without_empty_values from univers.version_range import RANGE_CLASS_BY_SCHEMES @@ -42,6 +44,7 @@ cve_regex = re.compile(r"CVE-[0-9]{4}-[0-9]{4,19}", re.IGNORECASE) is_cve = cve_regex.match find_all_cve = cve_regex.findall +cwe_regex = r"CWE-\d+" @dataclasses.dataclass(order=True, frozen=True) @@ -399,6 +402,29 @@ def get_cwe_id(cwe_string: str) -> int: return int(cwe_id) +def create_weaknesses_list(cwe_strings: str): + """ + Convert the CWE string to CWE ids and store them to weaknesses list. + >>> create_weaknesses_list(["CWE-125","CWE-379"]) + [125, 379] + """ + weaknesses = [] + db = Database() + for cwe_string in cwe_strings: + if not cwe_string: + continue + cwe_id = get_cwe_id(cwe_string) + if not cwe_id: + logger.error("Invalid CWE id: No CWE ID found") + continue + try: + db.get(cwe_id) + weaknesses.append(cwe_id) + except InvalidCWEError as e: + logger.error(f"Error: {e}") + return weaknesses + + def clean_nginx_git_tag(tag): """ Return a cleaned ``version`` string from an nginx git tag. 
From 3cee7717864c54c50b865cefc7d6c18d7a8783b7 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Wed, 18 Dec 2024 16:15:17 +0530 Subject: [PATCH 034/545] Add indexes for models (#1701) * Reorder Meta and core properties Signed-off-by: Tushar Goel * Add todo Signed-off-by: Tushar Goel * Add indexes on models Signed-off-by: Tushar Goel * Add migrations Signed-off-by: Tushar Goel * Add indexes Signed-off-by: Tushar Goel * Add migrations Signed-off-by: Tushar Goel * Fix formatting issues Signed-off-by: Tushar Goel --------- Signed-off-by: Tushar Goel --- ...ost_alter_package_version_rank_and_more.py | 83 +++++++++++++++++++ vulnerabilities/models.py | 44 +++++++--- 2 files changed, 116 insertions(+), 11 deletions(-) create mode 100644 vulnerabilities/migrations/0085_alter_package_is_ghost_alter_package_version_rank_and_more.py diff --git a/vulnerabilities/migrations/0085_alter_package_is_ghost_alter_package_version_rank_and_more.py b/vulnerabilities/migrations/0085_alter_package_is_ghost_alter_package_version_rank_and_more.py new file mode 100644 index 000000000..1b1181179 --- /dev/null +++ b/vulnerabilities/migrations/0085_alter_package_is_ghost_alter_package_version_rank_and_more.py @@ -0,0 +1,83 @@ +# Generated by Django 4.2.16 on 2024-12-18 10:09 + +import aboutcode.hashid +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0084_alter_package_options_package_version_rank"), + ] + + operations = [ + migrations.AlterField( + model_name="package", + name="is_ghost", + field=models.BooleanField( + db_index=True, + default=False, + help_text="True if the package does not exist in the upstream package manager or its repository.", + ), + ), + migrations.AlterField( + model_name="package", + name="version_rank", + field=models.IntegerField( + db_index=True, + default=0, + help_text="Rank of the version to support ordering by version. 
Rank zero means the rank has not been defined yet", + ), + ), + migrations.AlterField( + model_name="vulnerability", + name="vulnerability_id", + field=models.CharField( + blank=True, + db_index=True, + default=aboutcode.hashid.build_vcid, + help_text="Unique identifier for a vulnerability in the external representation. It is prefixed with VCID-", + max_length=20, + unique=True, + ), + ), + migrations.AlterField( + model_name="vulnerabilityreference", + name="reference_id", + field=models.CharField( + blank=True, + db_index=True, + help_text="An optional reference ID, such as DSA-4465-1 when available", + max_length=200, + ), + ), + migrations.AlterField( + model_name="vulnerabilityseverity", + name="url", + field=models.URLField( + db_index=True, + help_text="URL to the vulnerability severity", + max_length=1024, + null=True, + ), + ), + migrations.AddIndex( + model_name="package", + index=models.Index( + fields=["type", "namespace", "name"], name="vulnerabili_type_825918_idx" + ), + ), + migrations.AddIndex( + model_name="package", + index=models.Index( + fields=["type", "namespace", "name", "qualifiers", "subpath"], + name="vulnerabili_type_8e6aff_idx", + ), + ), + migrations.AddIndex( + model_name="package", + index=models.Index( + fields=["type", "namespace", "name", "version"], name="vulnerabili_type_f6687a_idx" + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index c2e89022f..6248e1e47 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -161,6 +161,7 @@ class VulnerabilitySeverity(models.Model): max_length=1024, null=True, help_text="URL to the vulnerability severity", + db_index=True, ) scoring_system_choices = tuple( @@ -212,6 +213,7 @@ class Vulnerability(models.Model): default=utils.build_vcid, help_text="Unique identifier for a vulnerability in the external representation. 
" "It is prefixed with VCID-", + db_index=True, ) summary = models.TextField( @@ -453,6 +455,7 @@ class VulnerabilityReference(models.Model): max_length=200, help_text="An optional reference ID, such as DSA-4465-1 when available", blank=True, + db_index=True, ) objects = VulnerabilityReferenceQuerySet.as_manager() @@ -509,6 +512,7 @@ def get_fixed_by_package_versions(self, purl: PackageURL, fix=True): if fix: filter_dict["fixing_vulnerabilities__isnull"] = False + # TODO: why do we need distinct return Package.objects.filter(**filter_dict).distinct() def get_or_create_from_purl(self, purl: Union[PackageURL, str]): @@ -695,6 +699,7 @@ class Package(PackageURLMixin): is_ghost = models.BooleanField( default=False, help_text="True if the package does not exist in the upstream package manager or its repository.", + db_index=True, ) risk_score = models.DecimalField( @@ -709,10 +714,36 @@ class Package(PackageURLMixin): help_text="Rank of the version to support ordering by version. Rank " "zero means the rank has not been defined yet", default=0, + db_index=True, ) objects = PackageQuerySet.as_manager() + class Meta: + unique_together = ["type", "namespace", "name", "version", "qualifiers", "subpath"] + ordering = ["type", "namespace", "name", "version_rank", "version", "qualifiers", "subpath"] + indexes = [ + # Index for getting al versions of a package + models.Index(fields=["type", "namespace", "name"]), + models.Index(fields=["type", "namespace", "name", "qualifiers", "subpath"]), + # Index for getting a specific version of a package + models.Index( + fields=[ + "type", + "namespace", + "name", + "version", + ] + ), + ] + + def __str__(self): + return self.package_url + + @property + def purl(self): + return self.package_url + def save(self, *args, **kwargs): """ Save, normalizing PURL fields. 
@@ -738,17 +769,6 @@ def save(self, *args, **kwargs): self.plain_package_url = str(plain_purl) super().save(*args, **kwargs) - @property - def purl(self): - return self.package_url - - class Meta: - unique_together = ["type", "namespace", "name", "version", "qualifiers", "subpath"] - ordering = ["type", "namespace", "name", "version_rank", "version", "qualifiers", "subpath"] - - def __str__(self): - return self.package_url - @property def calculate_version_rank(self): """ @@ -981,12 +1001,14 @@ class PackageRelatedVulnerabilityBase(models.Model): package = models.ForeignKey( Package, on_delete=models.CASCADE, + db_index=True, # related_name="%(class)s_set", # Unique related_name per subclass ) vulnerability = models.ForeignKey( Vulnerability, on_delete=models.CASCADE, + db_index=True, # related_name="%(class)s_set", # Unique related_name per subclass ) From cebb5d64d1a0c0f7479df999a83510a5bf76d798 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Mon, 6 Jan 2025 18:36:23 +0530 Subject: [PATCH 035/545] Add fixed by package in V2 API (#1706) * Add fixed by package in V2 API Signed-off-by: Tushar Goel * Add tests Signed-off-by: Tushar Goel * Add tests Signed-off-by: Tushar Goel --------- Signed-off-by: Tushar Goel --- vulnerabilities/api_v2.py | 26 ++++- vulnerabilities/tests/test_api_v2.py | 156 ++++++++++++++++++++------- 2 files changed, 142 insertions(+), 40 deletions(-) diff --git a/vulnerabilities/api_v2.py b/vulnerabilities/api_v2.py index 2ab782d59..b570570ed 100644 --- a/vulnerabilities/api_v2.py +++ b/vulnerabilities/api_v2.py @@ -8,6 +8,7 @@ # +from django.db.models import Prefetch from django_filters import rest_framework as filters from drf_spectacular.utils import OpenApiParameter from drf_spectacular.utils import extend_schema @@ -20,8 +21,6 @@ from rest_framework.response import Response from rest_framework.reverse import reverse -from vulnerabilities.api import PackageFilterSet -from 
vulnerabilities.api import VulnerabilitySeveritySerializer from vulnerabilities.models import Package from vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilityReference @@ -195,7 +194,20 @@ class Meta: ] def get_affected_by_vulnerabilities(self, obj): - return [vuln.vulnerability_id for vuln in obj.affected_by_vulnerabilities.all()] + """ + Return a dictionary with vulnerabilities as keys and their details, including fixed_by_packages. + """ + result = {} + for vuln in getattr(obj, "prefetched_affected_vulnerabilities", []): + fixed_by_package = vuln.fixed_by_packages.first() + purl = None + if fixed_by_package: + purl = fixed_by_package.package_url + result[vuln.vulnerability_id] = { + "vulnerability_id": vuln.vulnerability_id, + "fixed_by_packages": purl, + } + return result def get_fixing_vulnerabilities(self, obj): # Ghost package should not fix any vulnerability. @@ -233,7 +245,13 @@ class PackageV2FilterSet(filters.FilterSet): class PackageV2ViewSet(viewsets.ReadOnlyModelViewSet): - queryset = Package.objects.all() + queryset = Package.objects.all().prefetch_related( + Prefetch( + "affected_by_vulnerabilities", + queryset=Vulnerability.objects.prefetch_related("fixed_by_packages"), + to_attr="prefetched_affected_vulnerabilities", + ) + ) serializer_class = PackageV2Serializer filter_backends = (filters.DjangoFilterBackend,) filterset_class = PackageV2FilterSet diff --git a/vulnerabilities/tests/test_api_v2.py b/vulnerabilities/tests/test_api_v2.py index fa3b7773c..af4dc47c8 100644 --- a/vulnerabilities/tests/test_api_v2.py +++ b/vulnerabilities/tests/test_api_v2.py @@ -7,6 +7,7 @@ # See https://aboutcode.org for more information about nexB OSS projects. 
# +from django.db.models import Prefetch from django.urls import reverse from packageurl import PackageURL from rest_framework import status @@ -67,6 +68,8 @@ def test_list_vulnerabilities(self): """ url = reverse("vulnerability-v2-list") response = self.client.get(url, format="json") + with self.assertNumQueries(5): + response = self.client.get(url, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn("results", response.data) self.assertIn("vulnerabilities", response.data["results"]) @@ -80,7 +83,8 @@ def test_retrieve_vulnerability_detail(self): Test retrieving vulnerability details by vulnerability_id. """ url = reverse("vulnerability-v2-detail", kwargs={"vulnerability_id": "VCID-1234"}) - response = self.client.get(url, format="json") + with self.assertNumQueries(8): + response = self.client.get(url, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data["vulnerability_id"], "VCID-1234") self.assertEqual(response.data["summary"], "Test vulnerability 1") @@ -93,7 +97,8 @@ def test_filter_vulnerability_by_vulnerability_id(self): Test filtering vulnerabilities by vulnerability_id. """ url = reverse("vulnerability-v2-list") - response = self.client.get(url, {"vulnerability_id": "VCID-1234"}, format="json") + with self.assertNumQueries(4): + response = self.client.get(url, {"vulnerability_id": "VCID-1234"}, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data["vulnerability_id"], "VCID-1234") @@ -102,7 +107,8 @@ def test_filter_vulnerability_by_alias(self): Test filtering vulnerabilities by alias. 
""" url = reverse("vulnerability-v2-list") - response = self.client.get(url, {"alias": "CVE-2021-5678"}, format="json") + with self.assertNumQueries(5): + response = self.client.get(url, {"alias": "CVE-2021-5678"}, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn("results", response.data) self.assertIn("vulnerabilities", response.data["results"]) @@ -116,9 +122,10 @@ def test_filter_vulnerabilities_multiple_ids(self): Test filtering vulnerabilities by multiple vulnerability_ids. """ url = reverse("vulnerability-v2-list") - response = self.client.get( - url, {"vulnerability_id": ["VCID-1234", "VCID-5678"]}, format="json" - ) + with self.assertNumQueries(5): + response = self.client.get( + url, {"vulnerability_id": ["VCID-1234", "VCID-5678"]}, format="json" + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data["results"]["vulnerabilities"]), 2) @@ -127,9 +134,10 @@ def test_filter_vulnerabilities_multiple_aliases(self): Test filtering vulnerabilities by multiple aliases. """ url = reverse("vulnerability-v2-list") - response = self.client.get( - url, {"alias": ["CVE-2021-1234", "CVE-2021-5678"]}, format="json" - ) + with self.assertNumQueries(5): + response = self.client.get( + url, {"alias": ["CVE-2021-1234", "CVE-2021-5678"]}, format="json" + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data["results"]["vulnerabilities"]), 2) @@ -139,7 +147,8 @@ def test_invalid_vulnerability_id(self): Should return 404 Not Found. 
""" url = reverse("vulnerability-v2-detail", kwargs={"vulnerability_id": "VCID-9999"}) - response = self.client.get(url, format="json") + with self.assertNumQueries(5): + response = self.client.get(url, format="json") self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_get_url_in_serializer(self): @@ -207,7 +216,8 @@ def test_list_packages(self): Should return a list of packages with their details and associated vulnerabilities. """ url = reverse("package-v2-list") - response = self.client.get(url, format="json") + with self.assertNumQueries(31): + response = self.client.get(url, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn("results", response.data) self.assertIn("packages", response.data["results"]) @@ -228,7 +238,8 @@ def test_filter_packages_by_purl(self): Test filtering packages by one or more PURLs. """ url = reverse("package-v2-list") - response = self.client.get(url, {"purl": "pkg:pypi/django@3.2"}, format="json") + with self.assertNumQueries(19): + response = self.client.get(url, {"purl": "pkg:pypi/django@3.2"}, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data["results"]["packages"]), 1) self.assertEqual(response.data["results"]["packages"][0]["purl"], "pkg:pypi/django@3.2") @@ -238,7 +249,10 @@ def test_filter_packages_by_affected_vulnerability(self): Test filtering packages by affected_by_vulnerability. 
""" url = reverse("package-v2-list") - response = self.client.get(url, {"affected_by_vulnerability": "VCID-1234"}, format="json") + with self.assertNumQueries(19): + response = self.client.get( + url, {"affected_by_vulnerability": "VCID-1234"}, format="json" + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data["results"]["packages"]), 1) self.assertEqual(response.data["results"]["packages"][0]["purl"], "pkg:pypi/django@3.2") @@ -248,26 +262,59 @@ def test_filter_packages_by_fixing_vulnerability(self): Test filtering packages by fixing_vulnerability. """ url = reverse("package-v2-list") - response = self.client.get(url, {"fixing_vulnerability": "VCID-5678"}, format="json") + with self.assertNumQueries(18): + response = self.client.get(url, {"fixing_vulnerability": "VCID-5678"}, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data["results"]["packages"]), 1) self.assertEqual(response.data["results"]["packages"][0]["purl"], "pkg:npm/lodash@4.17.20") def test_package_serializer_fields(self): """ - Test that the PackageV2Serializer returns the correct fields. + Test that the PackageV2Serializer returns the correct fields and formats them correctly. 
""" + # Fetch the package package = Package.objects.get(package_url="pkg:pypi/django@3.2") + + # Ensure prefetched data is available for the serializer + package = ( + Package.objects.filter(package_url="pkg:pypi/django@3.2") + .prefetch_related( + Prefetch( + "affected_by_vulnerabilities", + queryset=Vulnerability.objects.prefetch_related("fixed_by_packages"), + to_attr="prefetched_affected_vulnerabilities", + ) + ) + .first() + ) + + # Serialize the package serializer = PackageV2Serializer(package) data = serializer.data + + # Verify the presence of required fields self.assertIn("purl", data) self.assertIn("affected_by_vulnerabilities", data) self.assertIn("fixing_vulnerabilities", data) self.assertIn("next_non_vulnerable_version", data) self.assertIn("latest_non_vulnerable_version", data) + self.assertIn("risk_score", data) + + # Verify field values self.assertEqual(data["purl"], "pkg:pypi/django@3.2") - self.assertEqual(data["affected_by_vulnerabilities"], ["VCID-1234"]) - self.assertEqual(data["fixing_vulnerabilities"], []) + self.assertEqual(data["next_non_vulnerable_version"], None) + self.assertEqual(data["latest_non_vulnerable_version"], None) + self.assertEqual(data["risk_score"], None) + + # Verify affected_by_vulnerabilities structure + expected_affected_by_vulnerabilities = { + "VCID-1234": {"vulnerability_id": "VCID-1234", "fixed_by_packages": None} + } + self.assertEqual(data["affected_by_vulnerabilities"], expected_affected_by_vulnerabilities) + + # Verify fixing_vulnerabilities structure + expected_fixing_vulnerabilities = [] + self.assertEqual(data["fixing_vulnerabilities"], expected_fixing_vulnerabilities) def test_list_packages_pagination(self): """ @@ -300,7 +347,10 @@ def test_invalid_vulnerability_filter(self): Should return an empty list. 
""" url = reverse("package-v2-list") - response = self.client.get(url, {"affected_by_vulnerability": "VCID-9999"}, format="json") + with self.assertNumQueries(4): + response = self.client.get( + url, {"affected_by_vulnerability": "VCID-9999"}, format="json" + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data["results"]["packages"]), 0) @@ -310,7 +360,10 @@ def test_invalid_purl_filter(self): Should return an empty list. """ url = reverse("package-v2-list") - response = self.client.get(url, {"purl": "pkg:nonexistent/package@1.0.0"}, format="json") + with self.assertNumQueries(4): + response = self.client.get( + url, {"purl": "pkg:nonexistent/package@1.0.0"}, format="json" + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data["results"]["packages"]), 0) @@ -318,10 +371,24 @@ def test_get_affected_by_vulnerabilities(self): """ Test the get_affected_by_vulnerabilities method in the serializer. """ - package = Package.objects.get(package_url="pkg:pypi/django@3.2") + package = ( + Package.objects.filter(package_url="pkg:pypi/django@3.2") + .prefetch_related( + Prefetch( + "affected_by_vulnerabilities", + queryset=Vulnerability.objects.prefetch_related("fixed_by_packages"), + to_attr="prefetched_affected_vulnerabilities", + ) + ) + .first() + ) + serializer = PackageV2Serializer() vulnerabilities = serializer.get_affected_by_vulnerabilities(package) - self.assertEqual(vulnerabilities, ["VCID-1234"]) + self.assertEqual( + vulnerabilities, + {"VCID-1234": {"vulnerability_id": "VCID-1234", "fixed_by_packages": None}}, + ) def test_get_fixing_vulnerabilities(self): """ @@ -339,7 +406,8 @@ def test_bulk_lookup_with_valid_purls(self): """ url = reverse("package-v2-bulk-lookup") data = {"purls": ["pkg:pypi/django@3.2", "pkg:npm/lodash@4.17.20"]} - response = self.client.post(url, data, format="json") + with self.assertNumQueries(28): + response = self.client.post(url, data, format="json") 
self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn("packages", response.data) self.assertIn("vulnerabilities", response.data) @@ -363,7 +431,8 @@ def test_bulk_lookup_with_invalid_purls(self): """ url = reverse("package-v2-bulk-lookup") data = {"purls": ["pkg:pypi/nonexistent@1.0.0", "pkg:npm/unknown@0.0.1"]} - response = self.client.post(url, data, format="json") + with self.assertNumQueries(4): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) # Since the packages don't exist, the response should be empty self.assertEqual(len(response.data["packages"]), 0) @@ -376,7 +445,8 @@ def test_bulk_lookup_with_empty_purls(self): """ url = reverse("package-v2-bulk-lookup") data = {"purls": []} - response = self.client.post(url, data, format="json") + with self.assertNumQueries(3): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertIn("error", response.data) self.assertIn("message", response.data) @@ -389,7 +459,8 @@ def test_bulk_search_with_valid_purls(self): """ url = reverse("package-v2-bulk-search") data = {"purls": ["pkg:pypi/django@3.2", "pkg:npm/lodash@4.17.20"]} - response = self.client.post(url, data, format="json") + with self.assertNumQueries(28): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn("packages", response.data) self.assertIn("vulnerabilities", response.data) @@ -416,7 +487,8 @@ def test_bulk_search_with_purl_only_true(self): "purls": ["pkg:pypi/django@3.2", "pkg:npm/lodash@4.17.20"], "purl_only": True, } - response = self.client.post(url, data, format="json") + with self.assertNumQueries(17): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) # Since purl_only=True, response should be a list of PURLs self.assertIsInstance(response.data, list) 
@@ -442,7 +514,8 @@ def test_bulk_search_with_plain_purl_true(self): "purls": ["pkg:pypi/django@3.2", "pkg:pypi/django@3.2?extension=tar.gz"], "plain_purl": True, } - response = self.client.post(url, data, format="json") + with self.assertNumQueries(16): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn("packages", response.data) self.assertIn("vulnerabilities", response.data) @@ -462,7 +535,8 @@ def test_bulk_search_with_purl_only_and_plain_purl_true(self): "purl_only": True, "plain_purl": True, } - response = self.client.post(url, data, format="json") + with self.assertNumQueries(11): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) # Response should be a list of plain PURLs self.assertIsInstance(response.data, list) @@ -477,7 +551,8 @@ def test_bulk_search_with_invalid_purls(self): """ url = reverse("package-v2-bulk-search") data = {"purls": ["pkg:pypi/nonexistent@1.0.0", "pkg:npm/unknown@0.0.1"]} - response = self.client.post(url, data, format="json") + with self.assertNumQueries(4): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) # Since the packages don't exist, the response should be empty self.assertEqual(len(response.data["packages"]), 0) @@ -490,7 +565,8 @@ def test_bulk_search_with_empty_purls(self): """ url = reverse("package-v2-bulk-search") data = {"purls": []} - response = self.client.post(url, data, format="json") + with self.assertNumQueries(3): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertIn("error", response.data) self.assertIn("message", response.data) @@ -501,7 +577,8 @@ def test_all_vulnerable_packages(self): Test the 'all' endpoint that returns all vulnerable package URLs. 
""" url = reverse("package-v2-all") - response = self.client.get(url, format="json") + with self.assertNumQueries(4): + response = self.client.get(url, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) # Since package1 is vulnerable, it should be returned expected_purls = ["pkg:pypi/django@3.2"] @@ -514,7 +591,8 @@ def test_lookup_with_valid_purl(self): """ url = reverse("package-v2-lookup") data = {"purl": "pkg:pypi/django@3.2"} - response = self.client.post(url, data, format="json") + with self.assertNumQueries(12): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(1, len(response.data)) self.assertIn("purl", response.data[0]) @@ -523,7 +601,10 @@ def test_lookup_with_valid_purl(self): self.assertIn("next_non_vulnerable_version", response.data[0]) self.assertIn("latest_non_vulnerable_version", response.data[0]) self.assertEqual(response.data[0]["purl"], "pkg:pypi/django@3.2") - self.assertEqual(response.data[0]["affected_by_vulnerabilities"], ["VCID-1234"]) + self.assertEqual( + response.data[0]["affected_by_vulnerabilities"], + {"VCID-1234": {"vulnerability_id": "VCID-1234", "fixed_by_packages": None}}, + ) self.assertEqual(response.data[0]["fixing_vulnerabilities"], []) def test_lookup_with_invalid_purl(self): @@ -533,7 +614,8 @@ def test_lookup_with_invalid_purl(self): """ url = reverse("package-v2-lookup") data = {"purl": "pkg:pypi/nonexistent@1.0.0"} - response = self.client.post(url, data, format="json") + with self.assertNumQueries(4): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) # No packages or vulnerabilities should be returned self.assertEqual(len(response.data), 0) @@ -545,7 +627,8 @@ def test_lookup_with_missing_purl(self): """ url = reverse("package-v2-lookup") data = {} - response = self.client.post(url, data, format="json") + with self.assertNumQueries(3): + response = 
self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertIn("error", response.data) self.assertIn("message", response.data) @@ -558,7 +641,8 @@ def test_lookup_with_invalid_purl_format(self): """ url = reverse("package-v2-lookup") data = {"purl": "invalid_purl_format"} - response = self.client.post(url, data, format="json") + with self.assertNumQueries(4): + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) # No packages or vulnerabilities should be returned self.assertEqual(len(response.data), 0) From 6b4c6bb10f6f07d8f08077dbf7351164feb2027f Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Mon, 6 Jan 2025 18:40:48 +0530 Subject: [PATCH 036/545] Add tests for num queries for views (#1730) * Add tests for num queries for views Signed-off-by: Tushar Goel * Add tests for num queries for views Signed-off-by: Tushar Goel * Add tests for num queries for views Signed-off-by: Tushar Goel --------- Signed-off-by: Tushar Goel --- vulnerabilities/tests/test_view.py | 58 ++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/vulnerabilities/tests/test_view.py b/vulnerabilities/tests/test_view.py index 692305f8d..fd62e94a1 100644 --- a/vulnerabilities/tests/test_view.py +++ b/vulnerabilities/tests/test_view.py @@ -8,6 +8,7 @@ # import os +import time import pytest from django.test import Client @@ -15,9 +16,13 @@ from packageurl import PackageURL from univers import versions +from vulnerabilities import models +from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Alias +from vulnerabilities.models import FixingPackageRelatedVulnerability from vulnerabilities.models import Package from vulnerabilities.models import Vulnerability +from vulnerabilities.models import VulnerabilitySeverity from 
vulnerabilities.templatetags.url_filters import url_quote_filter from vulnerabilities.views import PackageDetails from vulnerabilities.views import PackageSearch @@ -273,3 +278,56 @@ class TestCustomFilters: def test_url_quote_filter(self, input_value, expected_output): filtered = url_quote_filter(input_value) assert filtered == expected_output + + +class VulnerabilitySearchTestCaseWithPackages(TestCase): + def setUp(self): + self.vuln1 = Vulnerability.objects.create(vulnerability_id="VCID-1", summary="Vuln 1") + self.vuln2 = Vulnerability.objects.create(vulnerability_id="VCID-2", summary="Vuln 2") + self.vuln3 = Vulnerability.objects.create(vulnerability_id="VCID-3", summary="Vuln 3") + self.vuln4 = Vulnerability.objects.create(vulnerability_id="VCID-4", summary="Vuln 4") + self.vuln5 = Vulnerability.objects.create(vulnerability_id="VCID-5", summary="Vuln 5") + + self.package1 = Package.objects.create(type="pypi", name="django", version="1.0.0") + self.package2 = Package.objects.create(type="pypi", name="django", version="2.0.0") + self.package3 = Package.objects.create(type="pypi", name="django", version="3.0.0") + + AffectedByPackageRelatedVulnerability.objects.create( + package=self.package1, vulnerability=self.vuln1 + ) + AffectedByPackageRelatedVulnerability.objects.create( + package=self.package1, vulnerability=self.vuln2 + ) + AffectedByPackageRelatedVulnerability.objects.create( + package=self.package2, vulnerability=self.vuln3 + ) + AffectedByPackageRelatedVulnerability.objects.create( + package=self.package2, vulnerability=self.vuln4 + ) + + FixingPackageRelatedVulnerability.objects.create( + package=self.package3, vulnerability=self.vuln5 + ) + + self.severity1 = VulnerabilitySeverity.objects.create( + scoring_system="CVSSv3", + value="9.8", + scoring_elements="AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + ) + self.severity2 = VulnerabilitySeverity.objects.create( + scoring_system="CVSSv3", + value="7.5", + 
scoring_elements="AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + ) + + self.vuln1.severities.add(self.severity1) + self.vuln1.severities.add(self.severity2) + self.vuln1.save() + + def test_aggregate_fixed_and_affected_packages(self): + with self.assertNumQueries(11): + start_time = time.time() + response = self.client.get(f"/vulnerabilities/{self.vuln1.vulnerability_id}") + end_time = time.time() + assert end_time - start_time < 0.05 + self.assertEqual(response.status_code, 200) From 9fa9514c3a3a0d3bf0e5cbbc4eb3561cb8c58ae9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Jan 2025 15:47:17 +0000 Subject: [PATCH 037/545] Bump jinja2 from 3.1.4 to 3.1.5 Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.4 to 3.1.5. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.4...3.1.5) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e05816191..347259791 100644 --- a/requirements.txt +++ b/requirements.txt @@ -53,7 +53,7 @@ ipython==8.10.0 isort==5.10.1 itypes==1.2.0 jedi==0.18.1 -Jinja2==3.1.4 +Jinja2==3.1.5 jsonschema==3.2.0 license-expression==30.3.1 lxml==4.9.1 From 87bde21a3d0f4d974609d32b042b1cfb5d302aa4 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 8 Jan 2025 16:17:37 +0530 Subject: [PATCH 038/545] Add postgresql conf in docker compose Signed-off-by: Tushar Goel --- docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index 865be14e1..afbe9f337 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,10 +3,12 @@ version: "3" services: db: image: postgres:13 + command: -c config_file=/etc/postgresql/postgresql.conf env_file: - docker.env volumes: - db_data:/var/lib/postgresql/data/ + - ./etc/postgresql/postgresql.conf:/etc/postgresql/postgresql.conf vulnerablecode: build: . 
From 8519b0d2daa0d73d5d6313e30c50c269f10d27f2 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 20 Dec 2024 23:00:19 +0530 Subject: [PATCH 039/545] Add models for CodeFix Signed-off-by: Tushar Goel --- vulnerabilities/migrations/0085_codefix.py | 60 ++++++++++++++++++++++ vulnerabilities/models.py | 29 +++++++++++ 2 files changed, 89 insertions(+) create mode 100644 vulnerabilities/migrations/0085_codefix.py diff --git a/vulnerabilities/migrations/0085_codefix.py b/vulnerabilities/migrations/0085_codefix.py new file mode 100644 index 000000000..cbe162845 --- /dev/null +++ b/vulnerabilities/migrations/0085_codefix.py @@ -0,0 +1,60 @@ +# Generated by Django 4.2.16 on 2024-12-20 17:29 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0084_alter_package_options_package_version_rank"), + ] + + operations = [ + migrations.CreateModel( + name="CodeFix", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("commits", models.JSONField(blank=True, default=list)), + ("pulls", models.JSONField(blank=True, default=list)), + ("downloads", models.JSONField(blank=True, default=list)), + ("patch", models.TextField(blank=True, null=True)), + ("notes", models.TextField(blank=True, null=True)), + ("references", models.JSONField(blank=True, default=list)), + ("status_reviewed", models.BooleanField(default=False)), + ("base_commit", models.CharField(blank=True, max_length=255, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "applies_to_versions", + models.ManyToManyField( + blank=True, related_name="fixes", to="vulnerabilities.package" + ), + ), + ( + "base_version", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="base_version_changes", + 
to="vulnerabilities.package", + ), + ), + ( + "vulnerabilities", + models.ManyToManyField( + blank=True, related_name="codefixes", to="vulnerabilities.vulnerability" + ), + ), + ], + options={ + "abstract": False, + }, + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 6248e1e47..610d35c5f 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1581,3 +1581,32 @@ class Exploit(models.Model): @property def get_known_ransomware_campaign_use_type(self): return "Known" if self.known_ransomware_campaign_use else "Unknown" + + +class CodeChange(models.Model): + commits = models.JSONField(blank=True, default=list) + pulls = models.JSONField(blank=True, default=list) + downloads = models.JSONField(blank=True, default=list) + patch = models.TextField(blank=True, null=True) + notes = models.TextField(blank=True, null=True) + references = models.JSONField(blank=True, default=list) + status_reviewed = models.BooleanField(default=False) + base_version = models.ForeignKey( + "Package", + null=True, + blank=True, + on_delete=models.SET_NULL, + related_name="base_version_changes", + ) + base_commit = models.CharField(max_length=255, blank=True, null=True) + + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + abstract = True + + +class CodeFix(CodeChange): + vulnerabilities = models.ManyToManyField("Vulnerability", related_name="codefixes", blank=True) + applies_to_versions = models.ManyToManyField("Package", related_name="fixes", blank=True) From 6f984c3e7c17b53a0b22168692fc740686460b60 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 20 Dec 2024 23:33:10 +0530 Subject: [PATCH 040/545] Add pipeline to collect fix commit Signed-off-by: Tushar Goel --- vulnerabilities/pipelines/collect_commits.py | 112 +++++++++++++++++++ 1 file changed, 112 insertions(+) create mode 100644 vulnerabilities/pipelines/collect_commits.py diff --git 
a/vulnerabilities/pipelines/collect_commits.py b/vulnerabilities/pipelines/collect_commits.py new file mode 100644 index 000000000..61f60b2a2 --- /dev/null +++ b/vulnerabilities/pipelines/collect_commits.py @@ -0,0 +1,112 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +from aboutcode.pipeline import LoopProgress + +from vulnerabilities.models import CodeFix +from vulnerabilities.models import Package +from vulnerabilities.models import VulnerabilityReference +from vulnerabilities.pipelines import VulnerableCodePipeline +from vulnerabilities.utils import normalize_purl + + +class CollectFixCommitsPipeline(VulnerableCodePipeline): + """ + Improver pipeline to scout References and create CodeFix entries. 
+ """ + + pipeline_id = "collect_fix_commits" + license_expression = None + + @classmethod + def steps(cls): + return (cls.collect_and_store_fix_commits,) + + def collect_and_store_fix_commits(self): + references = VulnerabilityReference.objects.prefetch_related("vulnerabilities").distinct() + + self.log(f"Processing {references.count():,d} references to collect fix commits.") + + created_fix_count = 0 + progress = LoopProgress(total_iterations=references.count(), logger=self.log) + for reference in progress.iter(references.paginated(per_page=500)): + for vulnerability in reference.vulnerabilities.all(): + package_urls = self.extract_package_urls(reference) + commit_id = self.extract_commit_id(reference.url) + + if commit_id and package_urls: + for purl in package_urls: + normalized_purl = normalize_purl(purl) + package = self.get_or_create_package(normalized_purl) + codefix = self.create_codefix_entry( + vulnerability=vulnerability, + package=package, + commit_id=commit_id, + reference=reference.url, + ) + if codefix: + created_fix_count += 1 + + self.log(f"Successfully created {created_fix_count:,d} CodeFix entries.") + + def extract_package_urls(self, reference): + """ + Extract Package URLs from a reference. + Returns a list of Package URLs inferred from the reference. + """ + urls = [] + if "github" in reference.url: + parts = reference.url.split("/") + if len(parts) >= 5: + namespace = parts[-3] + name = parts[-2] + commit = parts[-1] + if commit: + urls.append(f"pkg:github/{namespace}/{name}@{commit}") + return urls + + def extract_commit_id(self, url): + """ + Extract a commit ID from a URL, if available. + """ + if "github" in url: + parts = url.split("/") + return parts[-1] if len(parts) > 0 else None + return None + + def get_or_create_package(self, purl): + """ + Get or create a Package object from a Package URL. 
+ """ + try: + package, _ = Package.objects.get_or_create_from_purl(purl) + return package + except Exception as e: + self.log(f"Error creating package from purl {purl}: {e}") + return None + + def create_codefix_entry(self, vulnerability, package, commit_id, reference): + """ + Create a CodeFix entry associated with the given vulnerability and package. + """ + try: + codefix, created = CodeFix.objects.get_or_create( + base_version=package, + defaults={ + "commits": [commit_id], + "references": [reference], + }, + ) + if created: + codefix.vulnerabilities.add(vulnerability) + codefix.save() + return codefix + except Exception as e: + self.log(f"Error creating CodeFix entry: {e}") + return None From bcdc572515bbe437a36415b9e5d0314559a0c66c Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 24 Dec 2024 00:54:57 +0530 Subject: [PATCH 041/545] Address review comments Signed-off-by: Tushar Goel --- vulnerabilities/models.py | 61 ++++-- vulnerabilities/pipelines/collect_commits.py | 187 ++++++++++++++----- 2 files changed, 193 insertions(+), 55 deletions(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 610d35c5f..ab93084d9 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1584,29 +1584,64 @@ def get_known_ransomware_campaign_use_type(self): class CodeChange(models.Model): - commits = models.JSONField(blank=True, default=list) - pulls = models.JSONField(blank=True, default=list) - downloads = models.JSONField(blank=True, default=list) - patch = models.TextField(blank=True, null=True) - notes = models.TextField(blank=True, null=True) - references = models.JSONField(blank=True, default=list) - status_reviewed = models.BooleanField(default=False) + """ + Abstract base model representing a change in code, either introducing or fixing a vulnerability. + This includes details about commits, patches, and related metadata. 
+ """ + + commits = models.JSONField( + blank=True, + default=list, + help_text="List of commit identifiers associated with the code change.", + ) + pulls = models.JSONField( + blank=True, + default=list, + help_text="List of pull request URLs associated with the code change.", + ) + downloads = models.JSONField( + blank=True, default=list, help_text="List of download URLs for the patched code." + ) + patch = models.TextField( + blank=True, null=True, help_text="The code change in patch format (e.g., git diff)." + ) + notes = models.TextField( + blank=True, null=True, help_text="Additional notes or instructions about the code change." + ) + references = models.JSONField( + blank=True, default=list, help_text="External references related to this code change." + ) + status_reviewed = models.BooleanField( + default=False, help_text="Indicates if the code change has been reviewed." + ) base_version = models.ForeignKey( "Package", null=True, blank=True, on_delete=models.SET_NULL, related_name="base_version_changes", + help_text="The base version of the package to which this code change applies.", + ) + base_commit = models.CharField( + max_length=255, + blank=True, + null=True, + help_text="The commit ID representing the state of the code before applying the fix or change.", + ) + created_at = models.DateTimeField( + auto_now_add=True, help_text="Timestamp indicating when the code change was created." + ) + updated_at = models.DateTimeField( + auto_now=True, help_text="Timestamp indicating when the code change was last updated." 
) - base_commit = models.CharField(max_length=255, blank=True, null=True) - - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) class Meta: abstract = True class CodeFix(CodeChange): - vulnerabilities = models.ManyToManyField("Vulnerability", related_name="codefixes", blank=True) - applies_to_versions = models.ManyToManyField("Package", related_name="fixes", blank=True) + package_vulnerabilities = models.ManyToManyField( + "AffectedByPackageRelatedVulnerability", + related_name="code_fixes", + help_text="The vulnerabilities fixed by this code change.", + ) diff --git a/vulnerabilities/pipelines/collect_commits.py b/vulnerabilities/pipelines/collect_commits.py index 61f60b2a2..44e91be31 100644 --- a/vulnerabilities/pipelines/collect_commits.py +++ b/vulnerabilities/pipelines/collect_commits.py @@ -8,12 +8,36 @@ # from aboutcode.pipeline import LoopProgress +from packageurl.contrib.url2purl import url2purl from vulnerabilities.models import CodeFix from vulnerabilities.models import Package from vulnerabilities.models import VulnerabilityReference from vulnerabilities.pipelines import VulnerableCodePipeline -from vulnerabilities.utils import normalize_purl + + +def extract_commit_id(url): + """ + Extract a commit ID from a URL, if available. + Supports different URL structures for commit references. + + >>> extract_commit_id("https://github.com/hedgedoc/hedgedoc/commit/c1789474020a6d668d616464cb2da5e90e123f65") + 'c1789474020a6d668d616464cb2da5e90e123f65' + """ + if "/commit/" in url: + parts = url.split("/") + if len(parts) > 1 and parts[-2] == "commit": + return parts[-1] + return None + + +def is_reference_already_processed(reference_url, commit_id): + """ + Check if a reference and commit ID pair already exists in a CodeFix entry. 
+ """ + return CodeFix.objects.filter( + references__contains=[reference_url], commits__contains=[commit_id] + ).exists() class CollectFixCommitsPipeline(VulnerableCodePipeline): @@ -37,48 +61,33 @@ def collect_and_store_fix_commits(self): progress = LoopProgress(total_iterations=references.count(), logger=self.log) for reference in progress.iter(references.paginated(per_page=500)): for vulnerability in reference.vulnerabilities.all(): - package_urls = self.extract_package_urls(reference) - commit_id = self.extract_commit_id(reference.url) - - if commit_id and package_urls: - for purl in package_urls: - normalized_purl = normalize_purl(purl) - package = self.get_or_create_package(normalized_purl) - codefix = self.create_codefix_entry( - vulnerability=vulnerability, - package=package, - commit_id=commit_id, - reference=reference.url, - ) - if codefix: - created_fix_count += 1 + vcs_url = normalize_vcs_url(reference.url) + commit_id = extract_commit_id(reference.url) - self.log(f"Successfully created {created_fix_count:,d} CodeFix entries.") + if not commit_id or not vcs_url: + continue - def extract_package_urls(self, reference): - """ - Extract Package URLs from a reference. - Returns a list of Package URLs inferred from the reference. - """ - urls = [] - if "github" in reference.url: - parts = reference.url.split("/") - if len(parts) >= 5: - namespace = parts[-3] - name = parts[-2] - commit = parts[-1] - if commit: - urls.append(f"pkg:github/{namespace}/{name}@{commit}") - return urls - - def extract_commit_id(self, url): - """ - Extract a commit ID from a URL, if available. 
- """ - if "github" in url: - parts = url.split("/") - return parts[-1] if len(parts) > 0 else None - return None + # Skip if already processed + if is_reference_already_processed(reference.url, commit_id): + self.log( + f"Skipping already processed reference: {reference.url} with commit {commit_id}" + ) + continue + purl = url2purl(vcs_url) + if not purl: + self.log(f"Could not create purl from url: {vcs_url}") + continue + package = self.get_or_create_package(purl) + codefix = self.create_codefix_entry( + vulnerability=vulnerability, + package=package, + commit_id=commit_id, + reference=reference.url, + ) + if codefix: + created_fix_count += 1 + + self.log(f"Successfully created {created_fix_count:,d} CodeFix entries.") def get_or_create_package(self, purl): """ @@ -109,4 +118,98 @@ def create_codefix_entry(self, vulnerability, package, commit_id, reference): return codefix except Exception as e: self.log(f"Error creating CodeFix entry: {e}") - return None + return + + +PLAIN_URLS = ( + "https://", + "http://", +) + +VCS_URLS = ( + "git://", + "git+git://", + "git+https://", + "git+http://", + "hg://", + "hg+http://", + "hg+https://", + "svn://", + "svn+https://", + "svn+http://", +) + + +def normalize_vcs_url(repo_url, vcs_tool=None): + """ + Return a normalized vcs_url version control URL given some `repo_url` and an + optional `vcs_tool` hint (such as 'git', 'hg', etc. 
+ + Handles shortcuts for GitHub, GitHub gist, Bitbucket, or GitLab repositories + and more using the same approach as npm install: + + See https://docs.npmjs.com/files/package.json#repository + or https://getcomposer.org/doc/05-repositories.md + + This is done here in npm: + https://github.com/npm/npm/blob/d3c858ce4cfb3aee515bb299eb034fe1b5e44344/node_modules/hosted-git-info/git-host-info.js + + These should be resolved: + npm/npm + gist:11081aaa281 + bitbucket:example/repo + gitlab:another/repo + expressjs/serve-static + git://github.com/angular/di.js.git + git://github.com/hapijs/boom + git@github.com:balderdashy/waterline-criteria.git + http://github.com/ariya/esprima.git + http://github.com/isaacs/nopt + https://github.com/chaijs/chai + https://github.com/christkv/kerberos.git + https://gitlab.com/foo/private.git + git@gitlab.com:foo/private.git + """ + if not repo_url or not isinstance(repo_url, str): + return + + repo_url = repo_url.strip() + if not repo_url: + return + + # TODO: If we match http and https, we may should add more check in + # case if the url is not a repo one. For example, check the domain + # name in the url... + if repo_url.startswith(VCS_URLS + PLAIN_URLS): + return repo_url + + if repo_url.startswith("git@"): + tool, _, right = repo_url.partition("@") + if ":" in repo_url: + host, _, repo = right.partition(":") + else: + # git@github.com/Filirom1/npm2aur.git + host, _, repo = right.partition("/") + + if any(r in host for r in ("bitbucket", "gitlab", "github")): + scheme = "https" + else: + scheme = "git" + + return f"{scheme}://{host}/{repo}" + + # FIXME: where these URL schemes come from?? 
+ if repo_url.startswith(("bitbucket:", "gitlab:", "github:", "gist:")): + hoster_urls = { + "bitbucket": f"https://bitbucket.org/{repo}", + "github": f"https://github.com/{repo}", + "gitlab": f"https://gitlab.com/{repo}", + "gist": f"https://gist.github.com/{repo}", + } + hoster, _, repo = repo_url.partition(":") + return hoster_urls[hoster] % locals() + + if len(repo_url.split("/")) == 2: + # implicit github, but that's only on NPM? + return f"https://github.com/{repo_url}" + return repo_url From b3c0ef260594caebb294e9433012792a85ce112a Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 24 Dec 2024 01:02:54 +0530 Subject: [PATCH 042/545] Fix Signed-off-by: Tushar Goel --- vulnerabilities/migrations/0085_codefix.py | 60 --------- vulnerabilities/migrations/0086_codefix.py | 124 +++++++++++++++++++ vulnerabilities/models.py | 2 +- vulnerabilities/pipelines/collect_commits.py | 28 +---- 4 files changed, 131 insertions(+), 83 deletions(-) delete mode 100644 vulnerabilities/migrations/0085_codefix.py create mode 100644 vulnerabilities/migrations/0086_codefix.py diff --git a/vulnerabilities/migrations/0085_codefix.py b/vulnerabilities/migrations/0085_codefix.py deleted file mode 100644 index cbe162845..000000000 --- a/vulnerabilities/migrations/0085_codefix.py +++ /dev/null @@ -1,60 +0,0 @@ -# Generated by Django 4.2.16 on 2024-12-20 17:29 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ("vulnerabilities", "0084_alter_package_options_package_version_rank"), - ] - - operations = [ - migrations.CreateModel( - name="CodeFix", - fields=[ - ( - "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), - ), - ("commits", models.JSONField(blank=True, default=list)), - ("pulls", models.JSONField(blank=True, default=list)), - ("downloads", models.JSONField(blank=True, default=list)), - 
("patch", models.TextField(blank=True, null=True)), - ("notes", models.TextField(blank=True, null=True)), - ("references", models.JSONField(blank=True, default=list)), - ("status_reviewed", models.BooleanField(default=False)), - ("base_commit", models.CharField(blank=True, max_length=255, null=True)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "applies_to_versions", - models.ManyToManyField( - blank=True, related_name="fixes", to="vulnerabilities.package" - ), - ), - ( - "base_version", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="base_version_changes", - to="vulnerabilities.package", - ), - ), - ( - "vulnerabilities", - models.ManyToManyField( - blank=True, related_name="codefixes", to="vulnerabilities.vulnerability" - ), - ), - ], - options={ - "abstract": False, - }, - ), - ] diff --git a/vulnerabilities/migrations/0086_codefix.py b/vulnerabilities/migrations/0086_codefix.py new file mode 100644 index 000000000..64ea35fe0 --- /dev/null +++ b/vulnerabilities/migrations/0086_codefix.py @@ -0,0 +1,124 @@ +# Generated by Django 4.2.16 on 2024-12-23 19:32 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0085_alter_package_is_ghost_alter_package_version_rank_and_more"), + ] + + operations = [ + migrations.CreateModel( + name="CodeFix", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "commits", + models.JSONField( + blank=True, + default=list, + help_text="List of commit identifiers associated with the code change.", + ), + ), + ( + "pulls", + models.JSONField( + blank=True, + default=list, + help_text="List of pull request URLs associated with the code change.", + ), + ), + ( + "downloads", + models.JSONField( + blank=True, + 
default=list, + help_text="List of download URLs for the patched code.", + ), + ), + ( + "patch", + models.TextField( + blank=True, + help_text="The code change in patch format (e.g., git diff).", + null=True, + ), + ), + ( + "notes", + models.TextField( + blank=True, + help_text="Additional notes or instructions about the code change.", + null=True, + ), + ), + ( + "references", + models.JSONField( + blank=True, + default=list, + help_text="External references related to this code change.", + ), + ), + ( + "status_reviewed", + models.BooleanField( + default=False, help_text="Indicates if the code change has been reviewed." + ), + ), + ( + "base_commit", + models.CharField( + blank=True, + help_text="The commit ID representing the state of the code before applying the fix or change.", + max_length=255, + null=True, + ), + ), + ( + "created_at", + models.DateTimeField( + auto_now_add=True, + help_text="Timestamp indicating when the code change was created.", + ), + ), + ( + "updated_at", + models.DateTimeField( + auto_now=True, + help_text="Timestamp indicating when the code change was last updated.", + ), + ), + ( + "base_version", + models.ForeignKey( + blank=True, + help_text="The base version of the package to which this code change applies.", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="base_version_codechanges", + to="vulnerabilities.package", + ), + ), + ( + "package_vulnerabilities", + models.ManyToManyField( + help_text="The vulnerabilities fixed by this code change.", + related_name="code_fixes", + to="vulnerabilities.affectedbypackagerelatedvulnerability", + ), + ), + ], + options={ + "abstract": False, + }, + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index ab93084d9..7da4ec2c4 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1619,7 +1619,7 @@ class CodeChange(models.Model): null=True, blank=True, on_delete=models.SET_NULL, - related_name="base_version_changes", + 
related_name="base_version_codechanges", help_text="The base version of the package to which this code change applies.", ) base_commit = models.CharField( diff --git a/vulnerabilities/pipelines/collect_commits.py b/vulnerabilities/pipelines/collect_commits.py index 44e91be31..564988d34 100644 --- a/vulnerabilities/pipelines/collect_commits.py +++ b/vulnerabilities/pipelines/collect_commits.py @@ -16,21 +16,6 @@ from vulnerabilities.pipelines import VulnerableCodePipeline -def extract_commit_id(url): - """ - Extract a commit ID from a URL, if available. - Supports different URL structures for commit references. - - >>> extract_commit_id("https://github.com/hedgedoc/hedgedoc/commit/c1789474020a6d668d616464cb2da5e90e123f65") - 'c1789474020a6d668d616464cb2da5e90e123f65' - """ - if "/commit/" in url: - parts = url.split("/") - if len(parts) > 1 and parts[-2] == "commit": - return parts[-1] - return None - - def is_reference_already_processed(reference_url, commit_id): """ Check if a reference and commit ID pair already exists in a CodeFix entry. 
@@ -62,15 +47,14 @@ def collect_and_store_fix_commits(self): for reference in progress.iter(references.paginated(per_page=500)): for vulnerability in reference.vulnerabilities.all(): vcs_url = normalize_vcs_url(reference.url) - commit_id = extract_commit_id(reference.url) - if not commit_id or not vcs_url: + if not vcs_url: continue # Skip if already processed - if is_reference_already_processed(reference.url, commit_id): + if is_reference_already_processed(reference.url, vcs_url): self.log( - f"Skipping already processed reference: {reference.url} with commit {commit_id}" + f"Skipping already processed reference: {reference.url} with VCS URL {vcs_url}" ) continue purl = url2purl(vcs_url) @@ -81,7 +65,7 @@ def collect_and_store_fix_commits(self): codefix = self.create_codefix_entry( vulnerability=vulnerability, package=package, - commit_id=commit_id, + vcs_url=vcs_url, reference=reference.url, ) if codefix: @@ -100,7 +84,7 @@ def get_or_create_package(self, purl): self.log(f"Error creating package from purl {purl}: {e}") return None - def create_codefix_entry(self, vulnerability, package, commit_id, reference): + def create_codefix_entry(self, vulnerability, package, vcs_url, reference): """ Create a CodeFix entry associated with the given vulnerability and package. 
""" @@ -108,7 +92,7 @@ def create_codefix_entry(self, vulnerability, package, commit_id, reference): codefix, created = CodeFix.objects.get_or_create( base_version=package, defaults={ - "commits": [commit_id], + "commits": [vcs_url], "references": [reference], }, ) From c01f6ec81cba6c617063d2fcdbd7be3c253f2d78 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Dec 2024 17:20:42 +0530 Subject: [PATCH 043/545] Model changes Signed-off-by: Tushar Goel --- vulnerabilities/models.py | 62 +++++---- vulnerabilities/pipelines/collect_commits.py | 22 ++- vulnerabilities/tests/test_collect_commits.py | 129 ++++++++++++++++++ 3 files changed, 185 insertions(+), 28 deletions(-) create mode 100644 vulnerabilities/tests/test_collect_commits.py diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 7da4ec2c4..6af4db6ae 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1587,12 +1587,16 @@ class CodeChange(models.Model): """ Abstract base model representing a change in code, either introducing or fixing a vulnerability. This includes details about commits, patches, and related metadata. + + We are tracking commits, pulls and downloads as references to the code change. The goal is to + keep track and store the actual code patch in the ``patch`` field. When not available the patch + will be inferred from these references using improvers. """ commits = models.JSONField( blank=True, default=list, - help_text="List of commit identifiers associated with the code change.", + help_text="List of commit identifiers using VCS URLs associated with the code change.", ) pulls = models.JSONField( blank=True, @@ -1603,36 +1607,30 @@ class CodeChange(models.Model): blank=True, default=list, help_text="List of download URLs for the patched code." ) patch = models.TextField( - blank=True, null=True, help_text="The code change in patch format (e.g., git diff)." 
- ) - notes = models.TextField( - blank=True, null=True, help_text="Additional notes or instructions about the code change." - ) - references = models.JSONField( - blank=True, default=list, help_text="External references related to this code change." - ) - status_reviewed = models.BooleanField( - default=False, help_text="Indicates if the code change has been reviewed." + blank=True, null=True, help_text="The code change as a patch in unified diff format." ) - base_version = models.ForeignKey( + base_package_version = models.ForeignKey( "Package", null=True, blank=True, on_delete=models.SET_NULL, - related_name="base_version_codechanges", - help_text="The base version of the package to which this code change applies.", + related_name="codechanges", + help_text="The base package version to which this code change applies.", ) - base_commit = models.CharField( - max_length=255, - blank=True, - null=True, - help_text="The commit ID representing the state of the code before applying the fix or change.", + notes = models.TextField( + blank=True, null=True, help_text="Notes or instructions about this code change." + ) + references = models.JSONField( + blank=True, default=list, help_text="URL references related to this code change." + ) + is_reviewed = models.BooleanField( + default=False, help_text="Indicates if this code change has been reviewed." ) created_at = models.DateTimeField( - auto_now_add=True, help_text="Timestamp indicating when the code change was created." + auto_now_add=True, help_text="Timestamp indicating when this code change was created." ) updated_at = models.DateTimeField( - auto_now=True, help_text="Timestamp indicating when the code change was last updated." + auto_now=True, help_text="Timestamp indicating when this code change was last updated." 
) class Meta: @@ -1640,8 +1638,24 @@ class Meta: class CodeFix(CodeChange): - package_vulnerabilities = models.ManyToManyField( + """ + A code fix is a code change that addresses a vulnerability and is associated: + - with a specific affected package version + - optionally with a specific fixing package version when it is known + """ + + affected_package_vulnerability = models.ForeignKey( "AffectedByPackageRelatedVulnerability", - related_name="code_fixes", - help_text="The vulnerabilities fixed by this code change.", + on_delete=models.CASCADE, + related_name="code_fix", + help_text="The affected package version to which this code fix applies.", + ) + + fixed_package_vulnerability = models.ForeignKey( + "FixingPackageRelatedVulnerability", + null=True, + blank=True, + on_delete=models.SET_NULL, + related_name="code_fix", + help_text="The fixing package version with this code fix", ) diff --git a/vulnerabilities/pipelines/collect_commits.py b/vulnerabilities/pipelines/collect_commits.py index 564988d34..690789b83 100644 --- a/vulnerabilities/pipelines/collect_commits.py +++ b/vulnerabilities/pipelines/collect_commits.py @@ -44,15 +44,25 @@ def collect_and_store_fix_commits(self): created_fix_count = 0 progress = LoopProgress(total_iterations=references.count(), logger=self.log) + + Reference + AffectedByPackageRelatedVulnerability + # FixingPackageRelatedVulnerability + + + for apv in AffectedByPackageRelatedVulnerability.objects.all(): + vuln = apv.vulnerability + for ref in vuln.references: + for reference in progress.iter(references.paginated(per_page=500)): for vulnerability in reference.vulnerabilities.all(): - vcs_url = normalize_vcs_url(reference.url) + vcs_url = normalize_vcs_url(repo_url=reference.url) if not vcs_url: continue # Skip if already processed - if is_reference_already_processed(reference.url, vcs_url): + if is_reference_already_processed(reference_url=reference.url, commit_id=vcs_url): self.log( f"Skipping already processed reference: 
{reference.url} with VCS URL {vcs_url}" ) @@ -97,7 +107,8 @@ def create_codefix_entry(self, vulnerability, package, vcs_url, reference): }, ) if created: - codefix.vulnerabilities.add(vulnerability) + AffectedByPackageRelatedVulnerability.objects.get + codefix.package_vulnerabilities.add(vulnerability) codefix.save() return codefix except Exception as e: @@ -124,10 +135,13 @@ def create_codefix_entry(self, vulnerability, package, vcs_url, reference): ) +# TODO: This function was borrowed from scancode-toolkit. We need to create a shared library for that. def normalize_vcs_url(repo_url, vcs_tool=None): """ Return a normalized vcs_url version control URL given some `repo_url` and an - optional `vcs_tool` hint (such as 'git', 'hg', etc. + optional `vcs_tool` hint (such as 'git', 'hg', etc.) + + Return None if repo_url is not recognized as a VCS URL. Handles shortcuts for GitHub, GitHub gist, Bitbucket, or GitLab repositories and more using the same approach as npm install: diff --git a/vulnerabilities/tests/test_collect_commits.py b/vulnerabilities/tests/test_collect_commits.py new file mode 100644 index 000000000..ad6aa1ba2 --- /dev/null +++ b/vulnerabilities/tests/test_collect_commits.py @@ -0,0 +1,129 @@ +from unittest.mock import patch + +from vulnerabilities.models import CodeFix +from vulnerabilities.pipelines.collect_commits import CollectFixCommitsPipeline +from vulnerabilities.pipelines.collect_commits import is_reference_already_processed +from vulnerabilities.pipelines.collect_commits import normalize_vcs_url + + +# --- Mocked Dependencies --- +class MockVulnerability: + def __init__(self, id): + self.id = id + + +class MockReference: + def __init__(self, url, vulnerabilities): + self.url = url + self.vulnerabilities = vulnerabilities + + +class MockPackage: + def __init__(self, purl): + self.purl = purl + + +# --- Tests for Utility Functions --- +@patch("vulnerabilities.models.CodeFix.objects.filter") +def 
test_reference_already_processed_true(mock_filter): + mock_filter.return_value.exists.return_value = True + result = is_reference_already_processed("http://example.com", "commit123") + assert result is True + mock_filter.assert_called_once_with( + references__contains=["http://example.com"], commits__contains=["commit123"] + ) + + +@patch("vulnerabilities.models.CodeFix.objects.filter") +def test_reference_already_processed_false(mock_filter): + mock_filter.return_value.exists.return_value = False + result = is_reference_already_processed("http://example.com", "commit123") + assert result is False + + +# --- Tests for normalize_vcs_url --- +def test_normalize_plain_url(): + url = normalize_vcs_url("https://github.com/user/repo.git") + assert url == "https://github.com/user/repo.git" + + +def test_normalize_git_ssh_url(): + url = normalize_vcs_url("git@github.com:user/repo.git") + assert url == "https://github.com/user/repo.git" + + +def test_normalize_implicit_github(): + url = normalize_vcs_url("user/repo") + assert url == "https://github.com/user/repo" + + +# --- Tests for CollectFixCommitsPipeline --- +@patch("vulnerabilities.models.VulnerabilityReference.objects.prefetch_related") +@patch("vulnerabilities.pipelines.collect_commits.CollectFixCommitsPipeline.get_or_create_package") +@patch("vulnerabilities.pipelines.collect_commits.is_reference_already_processed") +@patch("vulnerabilities.pipelines.collect_commits.url2purl") +def test_collect_and_store_fix_commits( + mock_url2purl, mock_is_processed, mock_get_package, mock_prefetch +): + mock_vuln = MockVulnerability(id=1) + mock_reference = MockReference(url="http://example.com", vulnerabilities=[mock_vuln]) + mock_prefetch.return_value.distinct.return_value.paginated.return_value = [mock_reference] + mock_url2purl.return_value = "pkg:example/package@1.0.0" + mock_is_processed.return_value = False + mock_get_package.return_value = 
MockPackage(purl="pkg:example/package@1.0.0") + + pipeline = CollectFixCommitsPipeline() + pipeline.log = lambda msg: None + pipeline.collect_and_store_fix_commits() + + mock_is_processed.assert_called_once_with("http://example.com", "pkg:example/package@1.0.0") + mock_get_package.assert_called_once_with("pkg:example/package@1.0.0") + + +@patch("vulnerabilities.pipelines.collect_commits.CollectFixCommitsPipeline.get_or_create_package") +def test_get_or_create_package_success(mock_get_or_create): + mock_get_or_create.return_value = (MockPackage(purl="pkg:example/package@1.0.0"), True) + pipeline = CollectFixCommitsPipeline() + package = pipeline.get_or_create_package("pkg:example/package@1.0.0") + assert package.purl == "pkg:example/package@1.0.0" + + +@patch("vulnerabilities.pipelines.collect_commits.CollectFixCommitsPipeline.get_or_create_package") +def test_get_or_create_package_failure(mock_get_or_create): + mock_get_or_create.side_effect = Exception("Error") + pipeline = CollectFixCommitsPipeline() + logs = [] + pipeline.log = lambda msg: logs.append(msg) + result = pipeline.get_or_create_package("pkg:example/package@1.0.0") + assert result is None + assert len(logs) == 1 + + +@patch("vulnerabilities.models.CodeFix.objects.get_or_create") +def test_create_codefix_entry_success(mock_get_or_create): + mock_get_or_create.return_value = (CodeFix(), True) + pipeline = CollectFixCommitsPipeline() + result = pipeline.create_codefix_entry( + MockVulnerability(1), + MockPackage("pkg:example/package@1.0.0"), + "http://example.com", + "http://reference", + ) + assert result is not None + mock_get_or_create.assert_called_once() + + +@patch("vulnerabilities.models.CodeFix.objects.get_or_create") +def test_create_codefix_entry_failure(mock_get_or_create): + mock_get_or_create.side_effect = Exception("Error") + pipeline = CollectFixCommitsPipeline() + logs = [] + pipeline.log = lambda msg: logs.append(msg) + result = pipeline.create_codefix_entry( + MockVulnerability(1), + 
MockPackage("pkg:example/package@1.0.0"), + "http://example.com", + "http://reference", + ) + assert result is None + assert len(logs) == 1 From 48d2144b8c0cf1145928f6c543f0604622a6144f Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 7 Jan 2025 20:20:46 +0530 Subject: [PATCH 044/545] Refactor the collect fix commit pipeline Signed-off-by: Tushar Goel --- vulnerabilities/pipelines/collect_commits.py | 164 +++++++++++------- vulnerabilities/tests/test_collect_commits.py | 6 +- 2 files changed, 106 insertions(+), 64 deletions(-) diff --git a/vulnerabilities/pipelines/collect_commits.py b/vulnerabilities/pipelines/collect_commits.py index 690789b83..93bcce205 100644 --- a/vulnerabilities/pipelines/collect_commits.py +++ b/vulnerabilities/pipelines/collect_commits.py @@ -7,22 +7,24 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +import re + from aboutcode.pipeline import LoopProgress from packageurl.contrib.url2purl import url2purl +from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import CodeFix +from vulnerabilities.models import FixingPackageRelatedVulnerability from vulnerabilities.models import Package from vulnerabilities.models import VulnerabilityReference from vulnerabilities.pipelines import VulnerableCodePipeline -def is_reference_already_processed(reference_url, commit_id): +def is_vcs_url_already_processed(commit_id): """ - Check if a reference and commit ID pair already exists in a CodeFix entry. + Check if a VCS URL exists in a CodeFix entry. 
""" - return CodeFix.objects.filter( - references__contains=[reference_url], commits__contains=[commit_id] - ).exists() + return CodeFix.objects.filter(commits__contains=[commit_id]).exists() class CollectFixCommitsPipeline(VulnerableCodePipeline): @@ -38,83 +40,54 @@ def steps(cls): return (cls.collect_and_store_fix_commits,) def collect_and_store_fix_commits(self): - references = VulnerabilityReference.objects.prefetch_related("vulnerabilities").distinct() + affected_by_package_related_vulnerabilities = ( + AffectedByPackageRelatedVulnerability.objects.all().prefetch_related( + "vulnerability", "vulnerability__references" + ) + ) - self.log(f"Processing {references.count():,d} references to collect fix commits.") + self.log( + f"Processing {affected_by_package_related_vulnerabilities.count():,d} references to collect fix commits." + ) created_fix_count = 0 - progress = LoopProgress(total_iterations=references.count(), logger=self.log) - - Reference - AffectedByPackageRelatedVulnerability - # FixingPackageRelatedVulnerability + progress = LoopProgress( + total_iterations=affected_by_package_related_vulnerabilities.count(), logger=self.log + ) + for apv in progress.iter( + affected_by_package_related_vulnerabilities.paginated(per_page=500) + ): + vulnerability = apv.vulnerability + for reference in vulnerability.references: - for apv in AffectedByPackageRelatedVulnerability.objects.all(): - vuln = apv.vulnerability - for ref in vuln.references: + if not is_vcs_url(reference.url): + continue - for reference in progress.iter(references.paginated(per_page=500)): - for vulnerability in reference.vulnerabilities.all(): vcs_url = normalize_vcs_url(repo_url=reference.url) if not vcs_url: continue # Skip if already processed - if is_reference_already_processed(reference_url=reference.url, commit_id=vcs_url): + if is_vcs_url_already_processed(commit_id=vcs_url): self.log( f"Skipping already processed reference: {reference.url} with VCS URL {vcs_url}" ) continue - purl = 
url2purl(vcs_url) - if not purl: - self.log(f"Could not create purl from url: {vcs_url}") - continue - package = self.get_or_create_package(purl) - codefix = self.create_codefix_entry( - vulnerability=vulnerability, - package=package, - vcs_url=vcs_url, - reference=reference.url, + code_fix, created = CodeFix.objects.get_or_create( + commits=[vcs_url], + affected_package_vulnerability=apv, ) - if codefix: + + if created: created_fix_count += 1 + self.log( + f"Created CodeFix entry for reference: {reference.url} with VCS URL {vcs_url}" + ) self.log(f"Successfully created {created_fix_count:,d} CodeFix entries.") - def get_or_create_package(self, purl): - """ - Get or create a Package object from a Package URL. - """ - try: - package, _ = Package.objects.get_or_create_from_purl(purl) - return package - except Exception as e: - self.log(f"Error creating package from purl {purl}: {e}") - return None - - def create_codefix_entry(self, vulnerability, package, vcs_url, reference): - """ - Create a CodeFix entry associated with the given vulnerability and package. - """ - try: - codefix, created = CodeFix.objects.get_or_create( - base_version=package, - defaults={ - "commits": [vcs_url], - "references": [reference], - }, - ) - if created: - AffectedByPackageRelatedVulnerability.objects.get - codefix.package_vulnerabilities.add(vulnerability) - codefix.save() - return codefix - except Exception as e: - self.log(f"Error creating CodeFix entry: {e}") - return - PLAIN_URLS = ( "https://", @@ -211,3 +184,72 @@ def normalize_vcs_url(repo_url, vcs_tool=None): # implicit github, but that's only on NPM? return f"https://github.com/{repo_url}" return repo_url + + +def is_vcs_url(repo_url): + """ + Check if a given URL or string matches a valid VCS (Version Control System) URL. 
+ + Supports: + - Standard VCS URL protocols (git, http, https, ssh) + - Shortcut syntax (e.g., github:user/repo, gitlab:group/repo) + - GitHub shortcut (e.g., user/repo) + + Args: + repo_url (str): The repository URL or shortcut to validate. + + Returns: + bool: True if the string is a valid VCS URL, False otherwise. + + Examples: + >>> is_vcs_url("git://github.com/angular/di.js.git") + True + >>> is_vcs_url("github:user/repo") + True + >>> is_vcs_url("user/repo") + True + >>> is_vcs_url("https://github.com/user/repo.git") + True + >>> is_vcs_url("git@github.com:user/repo.git") + True + >>> is_vcs_url("http://github.com/isaacs/nopt") + True + >>> is_vcs_url("https://gitlab.com/foo/private.git") + True + >>> is_vcs_url("git@gitlab.com:foo/private.git") + True + >>> is_vcs_url("bitbucket:example/repo") + True + >>> is_vcs_url("gist:11081aaa281") + True + >>> is_vcs_url("ftp://example.com/not-a-repo") + False + >>> is_vcs_url("random-string") + False + >>> is_vcs_url("https://example.com/not-a-repo") + False + """ + if not repo_url or not isinstance(repo_url, str): + return False + + repo_url = repo_url.strip() + if not repo_url: + return False + + # 1. Match URLs with standard protocols + if re.match(r"^(git|ssh|http|https)://", repo_url): + return True + + # 2. Match SSH URLs (e.g., git@github.com:user/repo.git) + if re.match(r"^git@\w+\.\w+:[\w\-./]+$", repo_url): + return True + + # 3. Match shortcut syntax (e.g., github:user/repo) + if re.match(r"^(github|gitlab|bitbucket|gist):[\w\-./]+$", repo_url): + return True + + # 4. 
Match implicit GitHub shortcut (e.g., user/repo) + if re.match(r"^[\w\-]+/[\w\-]+$", repo_url): + return True + + return False diff --git a/vulnerabilities/tests/test_collect_commits.py b/vulnerabilities/tests/test_collect_commits.py index ad6aa1ba2..6749fc54e 100644 --- a/vulnerabilities/tests/test_collect_commits.py +++ b/vulnerabilities/tests/test_collect_commits.py @@ -2,7 +2,7 @@ from vulnerabilities.models import CodeFix from vulnerabilities.pipelines.collect_commits import CollectFixCommitsPipeline -from vulnerabilities.pipelines.collect_commits import is_reference_already_processed +from vulnerabilities.pipelines.collect_commits import is_vcs_url_already_processed from vulnerabilities.pipelines.collect_commits import normalize_vcs_url @@ -27,7 +27,7 @@ def __init__(self, purl): @patch("vulnerabilities.models.CodeFix.objects.filter") def test_reference_already_processed_true(mock_filter): mock_filter.return_value.exists.return_value = True - result = is_reference_already_processed("http://example.com", "commit123") + result = is_vcs_url_already_processed("http://example.com", "commit123") assert result is True mock_filter.assert_called_once_with( references__contains=["http://example.com"], commits__contains=["commit123"] @@ -37,7 +37,7 @@ def test_reference_already_processed_true(mock_filter): @patch("vulnerabilities.models.CodeFix.objects.filter") def test_reference_already_processed_false(mock_filter): mock_filter.return_value.exists.return_value = False - result = is_reference_already_processed("http://example.com", "commit123") + result = is_vcs_url_already_processed("http://example.com", "commit123") assert result is False From 991fbeb3aefdd1f2ef3f517c82860522f2edeb9f Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 8 Jan 2025 18:58:48 +0530 Subject: [PATCH 045/545] Add tests Signed-off-by: Tushar Goel --- vulnerabilities/migrations/0086_codefix.py | 55 ++-- vulnerabilities/pipelines/collect_commits.py | 17 +- 
vulnerabilities/tests/test_collect_commits.py | 281 ++++++++++-------- 3 files changed, 196 insertions(+), 157 deletions(-) diff --git a/vulnerabilities/migrations/0086_codefix.py b/vulnerabilities/migrations/0086_codefix.py index 64ea35fe0..df67c3ae8 100644 --- a/vulnerabilities/migrations/0086_codefix.py +++ b/vulnerabilities/migrations/0086_codefix.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.16 on 2024-12-23 19:32 +# Generated by Django 4.2.16 on 2025-01-08 13:28 from django.db import migrations, models import django.db.models.deletion @@ -25,7 +25,7 @@ class Migration(migrations.Migration): models.JSONField( blank=True, default=list, - help_text="List of commit identifiers associated with the code change.", + help_text="List of commit identifiers using VCS URLs associated with the code change.", ), ), ( @@ -48,7 +48,7 @@ class Migration(migrations.Migration): "patch", models.TextField( blank=True, - help_text="The code change in patch format (e.g., git diff).", + help_text="The code change as a patch in unified diff format.", null=True, ), ), @@ -56,7 +56,7 @@ class Migration(migrations.Migration): "notes", models.TextField( blank=True, - help_text="Additional notes or instructions about the code change.", + help_text="Notes or instructions about this code change.", null=True, ), ), @@ -65,55 +65,58 @@ class Migration(migrations.Migration): models.JSONField( blank=True, default=list, - help_text="External references related to this code change.", + help_text="URL references related to this code change.", ), ), ( - "status_reviewed", + "is_reviewed", models.BooleanField( - default=False, help_text="Indicates if the code change has been reviewed." - ), - ), - ( - "base_commit", - models.CharField( - blank=True, - help_text="The commit ID representing the state of the code before applying the fix or change.", - max_length=255, - null=True, + default=False, help_text="Indicates if this code change has been reviewed." 
), ), ( "created_at", models.DateTimeField( auto_now_add=True, - help_text="Timestamp indicating when the code change was created.", + help_text="Timestamp indicating when this code change was created.", ), ), ( "updated_at", models.DateTimeField( auto_now=True, - help_text="Timestamp indicating when the code change was last updated.", + help_text="Timestamp indicating when this code change was last updated.", ), ), ( - "base_version", + "affected_package_vulnerability", + models.ForeignKey( + help_text="The affected package version to which this code fix applies.", + on_delete=django.db.models.deletion.CASCADE, + related_name="code_fix", + to="vulnerabilities.affectedbypackagerelatedvulnerability", + ), + ), + ( + "base_package_version", models.ForeignKey( blank=True, - help_text="The base version of the package to which this code change applies.", + help_text="The base package version to which this code change applies.", null=True, on_delete=django.db.models.deletion.SET_NULL, - related_name="base_version_codechanges", + related_name="codechanges", to="vulnerabilities.package", ), ), ( - "package_vulnerabilities", - models.ManyToManyField( - help_text="The vulnerabilities fixed by this code change.", - related_name="code_fixes", - to="vulnerabilities.affectedbypackagerelatedvulnerability", + "fixed_package_vulnerability", + models.ForeignKey( + blank=True, + help_text="The fixing package version with this code fix", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="code_fix", + to="vulnerabilities.fixingpackagerelatedvulnerability", ), ), ], diff --git a/vulnerabilities/pipelines/collect_commits.py b/vulnerabilities/pipelines/collect_commits.py index 93bcce205..8806fb4fb 100644 --- a/vulnerabilities/pipelines/collect_commits.py +++ b/vulnerabilities/pipelines/collect_commits.py @@ -10,13 +10,9 @@ import re from aboutcode.pipeline import LoopProgress -from packageurl.contrib.url2purl import url2purl from vulnerabilities.models import 
AffectedByPackageRelatedVulnerability from vulnerabilities.models import CodeFix -from vulnerabilities.models import FixingPackageRelatedVulnerability -from vulnerabilities.models import Package -from vulnerabilities.models import VulnerabilityReference from vulnerabilities.pipelines import VulnerableCodePipeline @@ -59,8 +55,7 @@ def collect_and_store_fix_commits(self): affected_by_package_related_vulnerabilities.paginated(per_page=500) ): vulnerability = apv.vulnerability - for reference in vulnerability.references: - + for reference in vulnerability.references.all(): if not is_vcs_url(reference.url): continue @@ -171,6 +166,7 @@ def normalize_vcs_url(repo_url, vcs_tool=None): # FIXME: where these URL schemes come from?? if repo_url.startswith(("bitbucket:", "gitlab:", "github:", "gist:")): + repo = repo_url.split(":")[1] hoster_urls = { "bitbucket": f"https://bitbucket.org/{repo}", "github": f"https://github.com/{repo}", @@ -236,12 +232,15 @@ def is_vcs_url(repo_url): if not repo_url: return False - # 1. Match URLs with standard protocols - if re.match(r"^(git|ssh|http|https)://", repo_url): + # Define valid VCS domains + vcs_domains = r"(github\.com|gitlab\.com|bitbucket\.org|gist\.github\.com)" + + # 1. Match URLs with standard protocols pointing to VCS domains + if re.match(rf"^(git|ssh|http|https)://{vcs_domains}/[\w\-.]+/[\w\-.]+", repo_url): return True # 2. Match SSH URLs (e.g., git@github.com:user/repo.git) - if re.match(r"^git@\w+\.\w+:[\w\-./]+$", repo_url): + if re.match(rf"^git@{vcs_domains}:[\w\-.]+/[\w\-.]+(\.git)?$", repo_url): return True # 3. 
Match shortcut syntax (e.g., github:user/repo) diff --git a/vulnerabilities/tests/test_collect_commits.py b/vulnerabilities/tests/test_collect_commits.py index 6749fc54e..c478244e1 100644 --- a/vulnerabilities/tests/test_collect_commits.py +++ b/vulnerabilities/tests/test_collect_commits.py @@ -1,129 +1,166 @@ -from unittest.mock import patch +from django.test import TestCase +from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import CodeFix +from vulnerabilities.models import Package +from vulnerabilities.models import Vulnerability +from vulnerabilities.models import VulnerabilityReference +from vulnerabilities.models import VulnerabilityRelatedReference from vulnerabilities.pipelines.collect_commits import CollectFixCommitsPipeline +from vulnerabilities.pipelines.collect_commits import is_vcs_url from vulnerabilities.pipelines.collect_commits import is_vcs_url_already_processed from vulnerabilities.pipelines.collect_commits import normalize_vcs_url -# --- Mocked Dependencies --- -class MockVulnerability: - def __init__(self, id): - self.id = id - - -class MockReference: - def __init__(self, url, vulnerabilities): - self.url = url - self.vulnerabilities = vulnerabilities - - -class MockPackage: - def __init__(self, purl): - self.purl = purl - - -# --- Tests for Utility Functions --- -@patch("vulnerabilities.models.CodeFix.objects.filter") -def test_reference_already_processed_true(mock_filter): - mock_filter.return_value.exists.return_value = True - result = is_vcs_url_already_processed("http://example.com", "commit123") - assert result is True - mock_filter.assert_called_once_with( - references__contains=["http://example.com"], commits__contains=["commit123"] - ) - - -@patch("vulnerabilities.models.CodeFix.objects.filter") -def test_reference_already_processed_false(mock_filter): - mock_filter.return_value.exists.return_value = False - result = is_vcs_url_already_processed("http://example.com", "commit123") - 
assert result is False - - -# --- Tests for normalize_vcs_url --- -def test_normalize_plain_url(): - url = normalize_vcs_url("https://github.com/user/repo.git") - assert url == "https://github.com/user/repo.git" - - -def test_normalize_git_ssh_url(): - url = normalize_vcs_url("git@github.com:user/repo.git") - assert url == "https://github.com/user/repo.git" - - -def test_normalize_implicit_github(): - url = normalize_vcs_url("user/repo") - assert url == "https://github.com/user/repo" - - -# --- Tests for CollectFixCommitsPipeline --- -@patch("vulnerabilities.models.VulnerabilityReference.objects.prefetch_related") -@patch("vulnerabilities.pipelines.collect_commits.CollectFixCommitsPipeline.get_or_create_package") -@patch("vulnerabilities.pipelines.collect_commits.is_reference_already_processed") -@patch("vulnerabilities.pipelines.collect_commits.url2purl") -def test_collect_and_store_fix_commits( - mock_url2purl, mock_is_processed, mock_get_package, mock_prefetch -): - mock_vuln = MockVulnerability(id=1) - mock_reference = MockReference(url="http://example.com", vulnerabilities=[mock_vuln]) - mock_prefetch.return_value.distinct.return_value.paginated.return_value = [mock_reference] - mock_url2purl.return_value = "pkg:example/package@1.0.0" - mock_is_processed.return_value = False - mock_get_package.return_value = MockPackage(purl="pkg:example/package@1.0.0") - - pipeline = CollectFixCommitsPipeline() - pipeline.log = lambda msg: None - pipeline.collect_and_store_fix_commits() - - mock_is_processed.assert_called_once_with("http://example.com", "pkg:example/package@1.0.0") - mock_get_package.assert_called_once_with("pkg:example/package@1.0.0") - - -@patch("vulnerabilities.pipelines.collect_commits.CollectFixCommitsPipeline.get_or_create_package") -def test_get_or_create_package_success(mock_get_or_create): - mock_get_or_create.return_value = (MockPackage(purl="pkg:example/package@1.0.0"), True) - 
pipeline = CollectFixCommitsPipeline() - package = pipeline.get_or_create_package("pkg:example/package@1.0.0") - assert package.purl == "pkg:example/package@1.0.0" - - -@patch("vulnerabilities.pipelines.collect_commits.CollectFixCommitsPipeline.get_or_create_package") -def test_get_or_create_package_failure(mock_get_or_create): - mock_get_or_create.side_effect = Exception("Error") - pipeline = CollectFixCommitsPipeline() - logs = [] - pipeline.log = lambda msg: logs.append(msg) - result = pipeline.get_or_create_package("pkg:example/package@1.0.0") - assert result is None - assert len(logs) == 1 - - -@patch("vulnerabilities.models.CodeFix.objects.get_or_create") -def test_create_codefix_entry_success(mock_get_or_create): - mock_get_or_create.return_value = (CodeFix(), True) - pipeline = CollectFixCommitsPipeline() - result = pipeline.create_codefix_entry( - MockVulnerability(1), - MockPackage("pkg:example/package@1.0.0"), - "http://example.com", - "http://reference", - ) - assert result is not None - mock_get_or_create.assert_called_once() - - -@patch("vulnerabilities.models.CodeFix.objects.get_or_create") -def test_create_codefix_entry_failure(mock_get_or_create): - mock_get_or_create.side_effect = Exception("Error") - pipeline = CollectFixCommitsPipeline() - logs = [] - pipeline.log = lambda msg: logs.append(msg) - result = pipeline.create_codefix_entry( - MockVulnerability(1), - MockPackage("pkg:example/package@1.0.0"), - "http://example.com", - "http://reference", - ) - assert result is None - assert len(logs) == 1 +class CollectFixCommitsPipelineTests(TestCase): + def setUp(self): + self.vulnerability = Vulnerability.objects.create( + vulnerability_id="VCID-1234", summary="Test vulnerability" + ) + + package = Package.objects.create(type="npm", namespace="abc", name="def", version="1") + + self.affected_by_vuln = AffectedByPackageRelatedVulnerability.objects.create( + package=package, vulnerability=self.vulnerability + ) + + self.reference1 = 
VulnerabilityReference.objects.create( + url="https://github.com/example/repo/commit/abcd1234" + ) + + self.reference2 = VulnerabilityReference.objects.create( + url="https://gitlab.com/example/repo/commit/efgh5678" + ) + VulnerabilityRelatedReference.objects.create( + vulnerability=self.vulnerability, reference=self.reference2 + ) + VulnerabilityRelatedReference.objects.create( + vulnerability=self.vulnerability, reference=self.reference1 + ) + + def test_is_vcs_url(self): + valid_urls = [ + "git://github.com/angular/di.js.git", + "https://github.com/user/repo.git", + "git@gitlab.com:user/repo.git", + ] + invalid_urls = [ + "ftp://example.com/not-a-repo", + "random-string", + "https://example.com/not-a-repo", + ] + for url in valid_urls: + assert is_vcs_url(url) is True + + for url in invalid_urls: + assert is_vcs_url(url) is False + + def test_normalize_vcs_url(self): + + assert ( + normalize_vcs_url("git@github.com:user/repo.git") == "https://github.com/user/repo.git" + ) + assert normalize_vcs_url("github:user/repo") == "https://github.com/user/repo" + assert normalize_vcs_url( + "https://github.com/user/repo.git" + ), "https://github.com/user/repo.git" + + def test_is_vcs_url_already_processed(self): + CodeFix.objects.create( + commits=["https://github.com/example/repo/commit/abcd1234"], + affected_package_vulnerability=self.affected_by_vuln, + ) + assert ( + is_vcs_url_already_processed("https://github.com/example/repo/commit/abcd1234") is True + ) + assert ( + is_vcs_url_already_processed("https://github.com/example/repo/commit/unknown") is False + ) + + def test_collect_and_store_fix_commits(self): + pipeline = CollectFixCommitsPipeline() + pipeline.collect_and_store_fix_commits() + + assert ( + CodeFix.objects.filter( + commits__contains=["https://github.com/example/repo/commit/abcd1234"] + 
).exists() + is True + ) + assert ( + CodeFix.objects.filter( + commits__contains=["https://gitlab.com/example/repo/commit/efgh5678"] + ).exists() + is True + ) + + def test_skip_already_processed_commit(self): + CodeFix.objects.create( + commits=["https://github.com/example/repo/commit/abcd1234"], + affected_package_vulnerability=self.affected_by_vuln, + ) + + pipeline = CollectFixCommitsPipeline() + pipeline.collect_and_store_fix_commits() + + # Ensure duplicate entry was not created + self.assertEqual( + CodeFix.objects.filter( + commits__contains=["https://github.com/example/repo/commit/abcd1234"] + ).count(), + 1, + ) + + +class IsVCSURLTests(TestCase): + def test_valid_vcs_urls(self): + valid_urls = [ + "git://github.com/example/repo.git", + "https://github.com/example/repo.git", + "git@github.com:example/repo.git", + "github:user/repo", + ] + for url in valid_urls: + with self.subTest(url=url): + self.assertTrue(is_vcs_url(url)) + + def test_invalid_vcs_urls(self): + invalid_urls = ["http://example.com", "ftp://example.com/repo", "random-string"] + for url in invalid_urls: + with self.subTest(url=url): + self.assertFalse(is_vcs_url(url)) + + +class NormalizeVCSURLTests(TestCase): + def test_normalize_valid_vcs_urls(self): + self.assertEqual( + normalize_vcs_url("git@github.com:user/repo.git"), "https://github.com/user/repo.git" + ) + self.assertEqual(normalize_vcs_url("github:user/repo"), "https://github.com/user/repo") + self.assertEqual( + normalize_vcs_url("https://github.com/user/repo.git"), + "https://github.com/user/repo.git", + ) + + +class IsVCSURLAlreadyProcessedTests(TestCase): + def setUp(self): + self.vulnerability = Vulnerability.objects.create(vulnerability_id="VCID-5678") + package = Package.objects.create(type="npm", namespace="abc", name="def", version="1") + self.affected_by_vuln = 
AffectedByPackageRelatedVulnerability.objects.create( + package=package, vulnerability=self.vulnerability + ) + self.code_fix = CodeFix.objects.create( + commits=["https://github.com/example/repo/commit/commit1"], + affected_package_vulnerability=self.affected_by_vuln, + ) + + def test_commit_already_processed(self): + self.assertTrue( + is_vcs_url_already_processed("https://github.com/example/repo/commit/commit1") + ) + + def test_commit_not_processed(self): + self.assertFalse( + is_vcs_url_already_processed("https://github.com/example/repo/commit/commit2") + ) From 7bb44be8bd2bd5a8bc157c2ad3a5d70e64b92a53 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 8 Jan 2025 20:21:54 +0530 Subject: [PATCH 046/545] Add CodeFix in API Signed-off-by: Tushar Goel --- vulnerabilities/api_v2.py | 90 ++++++++++++++++++++ vulnerabilities/improvers/__init__.py | 2 + vulnerabilities/models.py | 2 + vulnerabilities/pipelines/collect_commits.py | 23 ++--- vulnerabilities/tests/test_api_v2.py | 30 +++++-- vulnerablecode/urls.py | 3 + 6 files changed, 133 insertions(+), 17 deletions(-) diff --git a/vulnerabilities/api_v2.py b/vulnerabilities/api_v2.py index b570570ed..d3f1d714c 100644 --- a/vulnerabilities/api_v2.py +++ b/vulnerabilities/api_v2.py @@ -21,6 +21,7 @@ from rest_framework.response import Response from rest_framework.reverse import reverse +from vulnerabilities.models import CodeFix from vulnerabilities.models import Package from vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilityReference @@ -198,14 +199,25 @@ def get_affected_by_vulnerabilities(self, obj): Return a dictionary with vulnerabilities as keys and their details, including fixed_by_packages. 
""" result = {} + request = self.context.get("request") for vuln in getattr(obj, "prefetched_affected_vulnerabilities", []): fixed_by_package = vuln.fixed_by_packages.first() purl = None if fixed_by_package: purl = fixed_by_package.package_url + # Get code fixed for a vulnerability + code_fixes = CodeFix.objects.filter( + affected_package_vulnerability__vulnerability=vuln + ).distinct() + code_fix_urls = [ + reverse("codefix-detail", args=[code_fix.id], request=request) + for code_fix in code_fixes + ] + result[vuln.vulnerability_id] = { "vulnerability_id": vuln.vulnerability_id, "fixed_by_packages": purl, + "code_fixes": code_fix_urls, } return result @@ -521,3 +533,81 @@ def lookup(self, request): qs = self.get_queryset().for_purls([purl]).with_is_vulnerable() return Response(PackageV2Serializer(qs, many=True, context={"request": request}).data) + + +from rest_framework import serializers + +from vulnerabilities.models import CodeFix + + +class CodeFixSerializer(serializers.ModelSerializer): + """ + Serializer for the CodeFix model. + Provides detailed information about a code fix. 
+ """ + + affected_vulnerability_id = serializers.CharField( + source="affected_package_vulnerability.vulnerability.vulnerability_id", + read_only=True, + help_text="ID of the affected vulnerability.", + ) + affected_package_purl = serializers.CharField( + source="affected_package_vulnerability.package.package_url", + read_only=True, + help_text="PURL of the affected package.", + ) + fixed_package_purl = serializers.CharField( + source="fixed_package_vulnerability.package.package_url", + read_only=True, + help_text="PURL of the fixing package (if available).", + ) + created_at = serializers.DateTimeField( + format="%Y-%m-%dT%H:%M:%SZ", + read_only=True, + help_text="Timestamp when the code fix was created.", + ) + updated_at = serializers.DateTimeField( + format="%Y-%m-%dT%H:%M:%SZ", + read_only=True, + help_text="Timestamp when the code fix was last updated.", + ) + + class Meta: + model = CodeFix + fields = [ + "id", + "commits", + "pulls", + "downloads", + "patch", + "affected_vulnerability_id", + "affected_package_purl", + "fixed_package_purl", + "notes", + "references", + "is_reviewed", + "created_at", + "updated_at", + ] + read_only_fields = ["created_at", "updated_at"] + + +class CodeFixViewSet(viewsets.ReadOnlyModelViewSet): + """ + API endpoint that allows viewing CodeFix entries. + """ + + queryset = CodeFix.objects.all() + serializer_class = CodeFixSerializer + + def get_queryset(self): + """ + Optionally filter by vulnerability ID. 
+ """ + queryset = super().get_queryset() + vulnerability_id = self.request.query_params.get("vulnerability_id") + if vulnerability_id: + queryset = queryset.filter( + affected_package_vulnerability__vulnerability__vulnerability_id=vulnerability_id + ) + return queryset diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index dd73eb02d..44a65df47 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -10,6 +10,7 @@ from vulnerabilities.improvers import valid_versions from vulnerabilities.improvers import vulnerability_status from vulnerabilities.pipelines import VulnerableCodePipeline +from vulnerabilities.pipelines import collect_commits from vulnerabilities.pipelines import compute_package_risk from vulnerabilities.pipelines import compute_package_version_rank from vulnerabilities.pipelines import enhance_with_exploitdb @@ -41,6 +42,7 @@ enhance_with_exploitdb.ExploitDBImproverPipeline, compute_package_risk.ComputePackageRiskPipeline, compute_package_version_rank.ComputeVersionRankPipeline, + collect_commits.CollectFixCommitsPipeline, ] IMPROVERS_REGISTRY = { diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 6af4db6ae..1a58ec4dc 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1101,6 +1101,8 @@ class AffectedByPackageRelatedVulnerability(PackageRelatedVulnerabilityBase): related_name="affected_package_vulnerability_relations", ) + objects = BaseQuerySet.as_manager() + class Meta(PackageRelatedVulnerabilityBase.Meta): verbose_name_plural = "Affected By Package Related Vulnerabilities" diff --git a/vulnerabilities/pipelines/collect_commits.py b/vulnerabilities/pipelines/collect_commits.py index 8806fb4fb..bf94b755d 100644 --- a/vulnerabilities/pipelines/collect_commits.py +++ b/vulnerabilities/pipelines/collect_commits.py @@ -20,7 +20,8 @@ def is_vcs_url_already_processed(commit_id): """ Check if a VCS URL exists in a CodeFix entry. 
""" - return CodeFix.objects.filter(commits__contains=[commit_id]).exists() + if "commit" in commit_id: + return CodeFix.objects.filter(commits__contains=[commit_id]).exists() class CollectFixCommitsPipeline(VulnerableCodePipeline): @@ -70,17 +71,19 @@ def collect_and_store_fix_commits(self): f"Skipping already processed reference: {reference.url} with VCS URL {vcs_url}" ) continue - code_fix, created = CodeFix.objects.get_or_create( - commits=[vcs_url], - affected_package_vulnerability=apv, - ) - - if created: - created_fix_count += 1 - self.log( - f"Created CodeFix entry for reference: {reference.url} with VCS URL {vcs_url}" + # check if vcs_url has commit + if "/commit/" in vcs_url: + code_fix, created = CodeFix.objects.get_or_create( + commits=[vcs_url], + affected_package_vulnerability=apv, ) + if created: + created_fix_count += 1 + self.log( + f"Created CodeFix entry for reference: {reference.url} with VCS URL {vcs_url}" + ) + self.log(f"Successfully created {created_fix_count:,d} CodeFix entries.") diff --git a/vulnerabilities/tests/test_api_v2.py b/vulnerabilities/tests/test_api_v2.py index af4dc47c8..e3434c6a9 100644 --- a/vulnerabilities/tests/test_api_v2.py +++ b/vulnerabilities/tests/test_api_v2.py @@ -216,7 +216,7 @@ def test_list_packages(self): Should return a list of packages with their details and associated vulnerabilities. """ url = reverse("package-v2-list") - with self.assertNumQueries(31): + with self.assertNumQueries(32): response = self.client.get(url, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn("results", response.data) @@ -238,7 +238,7 @@ def test_filter_packages_by_purl(self): Test filtering packages by one or more PURLs. 
""" url = reverse("package-v2-list") - with self.assertNumQueries(19): + with self.assertNumQueries(20): response = self.client.get(url, {"purl": "pkg:pypi/django@3.2"}, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data["results"]["packages"]), 1) @@ -249,7 +249,7 @@ def test_filter_packages_by_affected_vulnerability(self): Test filtering packages by affected_by_vulnerability. """ url = reverse("package-v2-list") - with self.assertNumQueries(19): + with self.assertNumQueries(20): response = self.client.get( url, {"affected_by_vulnerability": "VCID-1234"}, format="json" ) @@ -308,7 +308,11 @@ def test_package_serializer_fields(self): # Verify affected_by_vulnerabilities structure expected_affected_by_vulnerabilities = { - "VCID-1234": {"vulnerability_id": "VCID-1234", "fixed_by_packages": None} + "VCID-1234": { + "code_fixes": [], + "vulnerability_id": "VCID-1234", + "fixed_by_packages": None, + } } self.assertEqual(data["affected_by_vulnerabilities"], expected_affected_by_vulnerabilities) @@ -387,7 +391,13 @@ def test_get_affected_by_vulnerabilities(self): vulnerabilities = serializer.get_affected_by_vulnerabilities(package) self.assertEqual( vulnerabilities, - {"VCID-1234": {"vulnerability_id": "VCID-1234", "fixed_by_packages": None}}, + { + "VCID-1234": { + "code_fixes": [], + "vulnerability_id": "VCID-1234", + "fixed_by_packages": None, + } + }, ) def test_get_fixing_vulnerabilities(self): @@ -591,7 +601,7 @@ def test_lookup_with_valid_purl(self): """ url = reverse("package-v2-lookup") data = {"purl": "pkg:pypi/django@3.2"} - with self.assertNumQueries(12): + with self.assertNumQueries(13): response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(1, len(response.data)) @@ -603,7 +613,13 @@ def test_lookup_with_valid_purl(self): self.assertEqual(response.data[0]["purl"], "pkg:pypi/django@3.2") self.assertEqual( 
response.data[0]["affected_by_vulnerabilities"], - {"VCID-1234": {"vulnerability_id": "VCID-1234", "fixed_by_packages": None}}, + { + "VCID-1234": { + "code_fixes": [], + "vulnerability_id": "VCID-1234", + "fixed_by_packages": None, + } + }, ) self.assertEqual(response.data[0]["fixing_vulnerabilities"], []) diff --git a/vulnerablecode/urls.py b/vulnerablecode/urls.py index 10f7db13f..54540a66d 100644 --- a/vulnerablecode/urls.py +++ b/vulnerablecode/urls.py @@ -20,6 +20,7 @@ from vulnerabilities.api import CPEViewSet from vulnerabilities.api import PackageViewSet from vulnerabilities.api import VulnerabilityViewSet +from vulnerabilities.api_v2 import CodeFixViewSet from vulnerabilities.api_v2 import PackageV2ViewSet from vulnerabilities.api_v2 import VulnerabilityV2ViewSet from vulnerabilities.views import ApiUserCreateView @@ -48,6 +49,8 @@ def __init__(self, *args, **kwargs): api_v2_router = OptionalSlashRouter() api_v2_router.register("packages", PackageV2ViewSet, basename="package-v2") api_v2_router.register("vulnerabilities", VulnerabilityV2ViewSet, basename="vulnerability-v2") +api_v2_router.register("codefixes", CodeFixViewSet, basename="codefix") + urlpatterns = [ path("api/v2/", include(api_v2_router.urls)), From 805590b89052a99465be5f68d9ce5cdda2b2c883 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 8 Jan 2025 20:26:21 +0530 Subject: [PATCH 047/545] Fix code Signed-off-by: Tushar Goel --- vulnerabilities/api_v2.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/vulnerabilities/api_v2.py b/vulnerabilities/api_v2.py index d3f1d714c..10ffb6d98 100644 --- a/vulnerabilities/api_v2.py +++ b/vulnerabilities/api_v2.py @@ -535,11 +535,6 @@ def lookup(self, request): return Response(PackageV2Serializer(qs, many=True, context={"request": request}).data) -from rest_framework import serializers - -from vulnerabilities.models import CodeFix - - class CodeFixSerializer(serializers.ModelSerializer): """ Serializer for the CodeFix model. 
From 3d7c209395bd99e7e33aa30bd314a18fac98f033 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 8 Jan 2025 20:31:02 +0530 Subject: [PATCH 048/545] Minor Fix Signed-off-by: Tushar Goel --- vulnerabilities/pipelines/collect_commits.py | 24 ++++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/vulnerabilities/pipelines/collect_commits.py b/vulnerabilities/pipelines/collect_commits.py index bf94b755d..92145c051 100644 --- a/vulnerabilities/pipelines/collect_commits.py +++ b/vulnerabilities/pipelines/collect_commits.py @@ -20,8 +20,7 @@ def is_vcs_url_already_processed(commit_id): """ Check if a VCS URL exists in a CodeFix entry. """ - if "commit" in commit_id: - return CodeFix.objects.filter(commits__contains=[commit_id]).exists() + return CodeFix.objects.filter(commits__contains=[commit_id]).exists() class CollectFixCommitsPipeline(VulnerableCodePipeline): @@ -57,6 +56,8 @@ def collect_and_store_fix_commits(self): ): vulnerability = apv.vulnerability for reference in vulnerability.references.all(): + if not "/commit/" in reference.url: + continue if not is_vcs_url(reference.url): continue @@ -72,17 +73,16 @@ def collect_and_store_fix_commits(self): ) continue # check if vcs_url has commit - if "/commit/" in vcs_url: - code_fix, created = CodeFix.objects.get_or_create( - commits=[vcs_url], - affected_package_vulnerability=apv, - ) + code_fix, created = CodeFix.objects.get_or_create( + commits=[vcs_url], + affected_package_vulnerability=apv, + ) - if created: - created_fix_count += 1 - self.log( - f"Created CodeFix entry for reference: {reference.url} with VCS URL {vcs_url}" - ) + if created: + created_fix_count += 1 + self.log( + f"Created CodeFix entry for reference: {reference.url} with VCS URL {vcs_url}" + ) self.log(f"Successfully created {created_fix_count:,d} CodeFix entries.") From c132094166b74acd1ee21c1f6623fc34863a49fd Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Thu, 9 Jan 2025 12:25:01 +0530 Subject: [PATCH 
049/545] Add default postgresql.conf for local docker build Signed-off-by: Keshav Priyadarshi --- etc/postgresql/postgresql.conf | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 etc/postgresql/postgresql.conf diff --git a/etc/postgresql/postgresql.conf b/etc/postgresql/postgresql.conf new file mode 100644 index 000000000..57ea6700a --- /dev/null +++ b/etc/postgresql/postgresql.conf @@ -0,0 +1,12 @@ +# Default configuration for development build +# DB Version: 13 +# OS Type: linux +# DB Type: development +# Data Storage: local + +listen_addresses = '*' +max_connections = 100 +shared_buffers = 128MB +dynamic_shared_memory_type = posix +max_wal_size = 1GB +min_wal_size = 80MB From ea98eeb7afe9bea40e7c53fe44d8810024af1beb Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 10 Jan 2025 13:53:48 +0530 Subject: [PATCH 050/545] Migrate alpine importer to aboutcode pipeline Signed-off-by: Keshav Priyadarshi --- vulnerabilities/importers/__init__.py | 4 +- .../alpine_linux_importer.py} | 118 ++++++++++++++---- .../test_alpine_linux_importer_pipeline.py} | 87 ++++++++----- 3 files changed, 146 insertions(+), 63 deletions(-) rename vulnerabilities/{importers/alpine_linux.py => pipelines/alpine_linux_importer.py} (66%) rename vulnerabilities/tests/{test_alpine.py => pipelines/test_alpine_linux_importer_pipeline.py} (90%) diff --git a/vulnerabilities/importers/__init__.py b/vulnerabilities/importers/__init__.py index 3394dd989..3f429f669 100644 --- a/vulnerabilities/importers/__init__.py +++ b/vulnerabilities/importers/__init__.py @@ -7,7 +7,6 @@ # See https://aboutcode.org for more information about nexB OSS projects. 
# -from vulnerabilities.importers import alpine_linux from vulnerabilities.importers import apache_httpd from vulnerabilities.importers import apache_kafka from vulnerabilities.importers import apache_tomcat @@ -35,6 +34,7 @@ from vulnerabilities.importers import vulnrichment from vulnerabilities.importers import xen from vulnerabilities.pipelines import VulnerableCodeBaseImporterPipeline +from vulnerabilities.pipelines import alpine_linux_importer from vulnerabilities.pipelines import github_importer from vulnerabilities.pipelines import gitlab_importer from vulnerabilities.pipelines import nginx_importer @@ -44,7 +44,6 @@ from vulnerabilities.pipelines import pysec_importer IMPORTERS_REGISTRY = [ - alpine_linux.AlpineImporter, openssl.OpensslImporter, redhat.RedhatImporter, debian.DebianImporter, @@ -78,6 +77,7 @@ github_importer.GitHubAPIImporterPipeline, nvd_importer.NVDImporterPipeline, pysec_importer.PyPIImporterPipeline, + alpine_linux_importer.AlpineLinuxImporterPipeline, ] IMPORTERS_REGISTRY = { diff --git a/vulnerabilities/importers/alpine_linux.py b/vulnerabilities/pipelines/alpine_linux_importer.py similarity index 66% rename from vulnerabilities/importers/alpine_linux.py rename to vulnerabilities/pipelines/alpine_linux_importer.py index db169184e..d29f9bc9b 100644 --- a/vulnerabilities/importers/alpine_linux.py +++ b/vulnerabilities/pipelines/alpine_linux_importer.py @@ -1,5 +1,4 @@ # -# # Copyright (c) nexB Inc. and others. All rights reserved. # VulnerableCode is a trademark of nexB Inc. 
# SPDX-License-Identifier: Apache-2.0 @@ -21,40 +20,61 @@ from vulnerabilities.importer import AdvisoryData from vulnerabilities.importer import AffectedPackage -from vulnerabilities.importer import Importer +from vulnerabilities.pipelines import VulnerableCodeBaseImporterPipeline from vulnerabilities.references import WireSharkReference from vulnerabilities.references import XsaReference from vulnerabilities.references import ZbxReference from vulnerabilities.utils import fetch_response from vulnerabilities.utils import is_cve -LOGGER = logging.getLogger(__name__) -BASE_URL = "https://secdb.alpinelinux.org/" +class AlpineLinuxImporterPipeline(VulnerableCodeBaseImporterPipeline): + """Collect Alpine Linux advisories.""" + + pipeline_id = "alpine_linux_importer" -class AlpineImporter(Importer): spdx_license_expression = "CC-BY-SA-4.0" license_url = "https://secdb.alpinelinux.org/license.txt" + url = "https://secdb.alpinelinux.org/" importer_name = "Alpine Linux Importer" - def advisory_data(self) -> Iterable[AdvisoryData]: - page_response_content = fetch_response(BASE_URL).content - advisory_directory_links = fetch_advisory_directory_links(page_response_content) + @classmethod + def steps(cls): + return ( + cls.collect_and_store_advisories, + cls.import_new_advisories, + ) + + def advisories_count(self) -> int: + return 0 + + def collect_advisories(self) -> Iterable[AdvisoryData]: + page_response_content = fetch_response(self.url).content + advisory_directory_links = fetch_advisory_directory_links( + page_response_content, self.url, self.log + ) advisory_links = [] for advisory_directory_link in advisory_directory_links: advisory_directory_page = fetch_response(advisory_directory_link).content advisory_links.extend( - fetch_advisory_links(advisory_directory_page, advisory_directory_link) + fetch_advisory_links(advisory_directory_page, advisory_directory_link, self.log) ) for link in advisory_links: record = fetch_response(link).json() if not record["packages"]: - 
LOGGER.error(f'"packages" not found in {link!r}') + self.log( + f'"packages" not found in {link!r}', + level=logging.DEBUG, + ) continue - yield from process_record(record=record, url=link) + yield from process_record(record=record, url=link, logger=self.log) -def fetch_advisory_directory_links(page_response_content: str) -> List[str]: +def fetch_advisory_directory_links( + page_response_content: str, + base_url: str, + logger: callable = None, +) -> List[str]: """ Return a list of advisory directory links present in `page_response_content` html string """ @@ -66,16 +86,22 @@ def fetch_advisory_directory_links(page_response_content: str) -> List[str]: ] if not alpine_versions: - LOGGER.error(f"No versions found in {BASE_URL!r}") + if logger: + logger( + f"No versions found in {base_url!r}", + level=logging.DEBUG, + ) return [] - advisory_directory_links = [urljoin(BASE_URL, version) for version in alpine_versions] + advisory_directory_links = [urljoin(base_url, version) for version in alpine_versions] return advisory_directory_links def fetch_advisory_links( - advisory_directory_page: str, advisory_directory_link: str + advisory_directory_page: str, + advisory_directory_link: str, + logger: callable = None, ) -> Iterable[str]: """ Yield json file urls present in `advisory_directory_page` @@ -83,36 +109,52 @@ def fetch_advisory_links( advisory_directory_page = BeautifulSoup(advisory_directory_page, features="lxml") anchor_tags = advisory_directory_page.find_all("a") if not anchor_tags: - LOGGER.error(f"No anchor tags found in {advisory_directory_link!r}") + if logger: + logger( + f"No anchor tags found in {advisory_directory_link!r}", + level=logging.DEBUG, + ) return iter([]) for anchor_tag in anchor_tags: if anchor_tag.text.endswith("json"): yield urljoin(advisory_directory_link, anchor_tag.text) -def check_for_attributes(record) -> bool: +def check_for_attributes(record, logger) -> bool: attributes = ["distroversion", "reponame", "archs"] for attribute in 
attributes: if attribute not in record: - LOGGER.error(f'"{attribute!r}" not found in {record!r}') + if logger: + logger( + f'"{attribute!r}" not found in {record!r}', + level=logging.DEBUG, + ) return False return True -def process_record(record: dict, url: str) -> Iterable[AdvisoryData]: +def process_record(record: dict, url: str, logger: callable = None) -> Iterable[AdvisoryData]: """ Return a list of AdvisoryData objects by processing data present in that `record` """ if not record.get("packages"): - LOGGER.error(f'"packages" not found in this record {record!r}') + if logger: + logger( + f'"packages" not found in this record {record!r}', + level=logging.DEBUG, + ) return [] for package in record["packages"]: if not package["pkg"]: - LOGGER.error(f'"pkg" not found in this package {package!r}') + if logger: + logger( + f'"pkg" not found in this package {package!r}', + level=logging.DEBUG, + ) continue - if not check_for_attributes(record): + if not check_for_attributes(record, logger): continue yield from load_advisories( pkg_infos=package["pkg"], @@ -120,6 +162,7 @@ def process_record(record: dict, url: str) -> Iterable[AdvisoryData]: reponame=record["reponame"], archs=record["archs"], url=url, + logger=logger, ) @@ -129,6 +172,7 @@ def load_advisories( reponame: str, archs: List[str], url: str, + logger: callable = None, ) -> Iterable[AdvisoryData]: """ Yield AdvisoryData by mapping data from `pkg_infos` @@ -136,17 +180,29 @@ def load_advisories( `distroversion`, `reponame`, `archs` """ if not pkg_infos.get("name"): - LOGGER.error(f'"name" is not available in package {pkg_infos!r}') + if logger: + logger( + f'"name" is not available in package {pkg_infos!r}', + level=logging.DEBUG, + ) return [] for version, fixed_vulns in pkg_infos["secfixes"].items(): if not fixed_vulns: - LOGGER.error(f"No fixed vulnerabilities in version {version!r}") + if logger: + logger( + f"No fixed vulnerabilities in version {version!r}", + level=logging.DEBUG, + ) continue for 
vuln_ids in fixed_vulns: if not isinstance(vuln_ids, str): - LOGGER.error(f"{vuln_ids!r} is not of `str` instance") + if logger: + logger( + f"{vuln_ids!r} is not of `str` instance", + level=logging.DEBUG, + ) continue vuln_ids = vuln_ids.split() aliases = [] @@ -179,10 +235,18 @@ def load_advisories( try: fixed_version = AlpineLinuxVersion(version) except Exception as e: - LOGGER.error(f"{version!r} is not a valid AlpineVersion {e!r}") + if logger: + logger( + f"{version!r} is not a valid AlpineVersion {e!r}", + level=logging.DEBUG, + ) continue if not isinstance(archs, List): - LOGGER.error(f"{archs!r} is not of `List` instance") + if logger: + logger( + f"{archs!r} is not of `List` instance", + level=logging.DEBUG, + ) continue if archs: for arch in archs: diff --git a/vulnerabilities/tests/test_alpine.py b/vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py similarity index 90% rename from vulnerabilities/tests/test_alpine.py rename to vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py index 1ab74a89a..386f239d8 100644 --- a/vulnerabilities/tests/test_alpine.py +++ b/vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py @@ -9,6 +9,7 @@ import json import os +from pathlib import Path import pytest from packageurl import PackageURL @@ -16,17 +17,18 @@ from vulnerabilities.importer import AdvisoryData from vulnerabilities.importer import AffectedPackage -from vulnerabilities.importers.alpine_linux import fetch_advisory_directory_links -from vulnerabilities.importers.alpine_linux import fetch_advisory_links -from vulnerabilities.importers.alpine_linux import load_advisories -from vulnerabilities.importers.alpine_linux import process_record +from vulnerabilities.pipelines.alpine_linux_importer import fetch_advisory_directory_links +from vulnerabilities.pipelines.alpine_linux_importer import fetch_advisory_links +from vulnerabilities.pipelines.alpine_linux_importer import load_advisories +from 
vulnerabilities.pipelines.alpine_linux_importer import process_record from vulnerabilities.references import XsaReference +from vulnerabilities.tests.pipelines import TestLogger -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) -TEST_DATA = os.path.join(BASE_DIR, "test_data", "alpine") +TEST_DATA = Path(__file__).parent.parent / "test_data" / "alpine" -def test_process_record(caplog): +def test_process_record(): + logger = TestLogger() expected_advisories = [ AdvisoryData( aliases=[], @@ -445,14 +447,18 @@ def test_process_record(caplog): url="https://secdb.alpinelinux.org/v3.11/", ), ] - with open(os.path.join(TEST_DATA, os.path.join(TEST_DATA, "v3.11", "main.json"))) as f: + with open(TEST_DATA / "v3.11/main.json") as f: found_advisories = list( - process_record(json.loads(f.read()), "https://secdb.alpinelinux.org/v3.11/") + process_record( + json.loads(f.read()), + "https://secdb.alpinelinux.org/v3.11/", + logger=logger.write, + ) ) assert found_advisories == expected_advisories assert ( "'4.10-1-r1' is not a valid AlpineVersion InvalidVersion(\"'4.10-1-r1' is not a valid \")" - in caplog.text + in logger.getvalue() ) @@ -474,14 +480,22 @@ def test_fetch_advisory_directory_links(): "https://secdb.alpinelinux.org/v3.8/", "https://secdb.alpinelinux.org/v3.9/", ] - with open(os.path.join(TEST_DATA, "web_pages", "directory.html")) as f: - assert fetch_advisory_directory_links(f.read()) == expected + with open(TEST_DATA / "web_pages/directory.html") as f: + assert ( + fetch_advisory_directory_links(f.read(), "https://secdb.alpinelinux.org/") == expected + ) -def test_fetch_advisory_directory_links_failure(caplog): - with open(os.path.join(TEST_DATA, "web_pages", "fail_directory.html")) as f: - assert fetch_advisory_directory_links(f.read()) == [] - assert "No versions found in 'https://secdb.alpinelinux.org/'" in caplog.text +def test_fetch_advisory_directory_links_failure(): + logger = TestLogger() + with open(TEST_DATA / "web_pages/fail_directory.html") as f: 
+ assert ( + fetch_advisory_directory_links( + f.read(), "https://secdb.alpinelinux.org/", logger=logger.write + ) + == [] + ) + assert "No versions found in 'https://secdb.alpinelinux.org/'" in logger.getvalue() def test_fetch_advisory_links(): @@ -489,45 +503,49 @@ def test_fetch_advisory_links(): "https://secdb.alpinelinux.org/v3.11/community.json", "https://secdb.alpinelinux.org/v3.11/main.json", ] - with open(os.path.join(TEST_DATA, "web_pages", "v3.11.html")) as f: + with open(TEST_DATA / "web_pages/v3.11.html") as f: assert ( list(fetch_advisory_links(f.read(), "https://secdb.alpinelinux.org/v3.11/")) == expected ) -def test_fetch_advisory_links_failure(caplog): - with open(os.path.join(TEST_DATA, "web_pages", "fail_directory.html")) as f: - assert list(fetch_advisory_links(f.read(), "v3.11")) == [] - assert "No anchor tags found in 'v3.11'" in caplog.text +def test_fetch_advisory_links_failure(): + logger = TestLogger() + with open(TEST_DATA / "web_pages/fail_directory.html") as f: + assert list(fetch_advisory_links(f.read(), "v3.11", logger=logger.write)) == [] + assert "No anchor tags found in 'v3.11'" in logger.getvalue() -def test_process_record_without_packages(caplog): - with open(os.path.join(TEST_DATA, os.path.join(TEST_DATA, "v3.3", "community.json"))) as f: - assert list(process_record(json.loads(f.read()), "")) == [] +def test_process_record_without_packages(): + logger = TestLogger() + with open(TEST_DATA / TEST_DATA / "v3.3/community.json") as f: + assert list(process_record(json.loads(f.read()), "", logger=logger.write)) == [] assert ( "\"packages\" not found in this record {'apkurl': '{{urlprefix}}/{{distroversion}}/{{reponame}}/{{arch}}/{{pkg.name}}-{{pkg.ver}}.apk', 'archs': ['armhf', 'x86', 'x86_64'], 'reponame': 'community', 'urlprefix': 'https://dl-cdn.alpinelinux.org/alpine', 'distroversion': 'v3.3', 'packages': []}" - in caplog.text + in logger.getvalue() ) -def test_load_advisories_package_without_name(caplog): +def 
test_load_advisories_package_without_name(): + logger = TestLogger() package = { "secfixes": {"4.10.0-r1": ["XSA-248"], "4.10.0-r2": ["CVE-2018-7540 XSA-252"]}, } - list(load_advisories(package, "v3.11", "main", archs=[], url="")) + list(load_advisories(package, "v3.11", "main", archs=[], url="", logger=logger.write)) assert ( "\"name\" is not available in package {'secfixes': {'4.10.0-r1': ['XSA-248'], '4.10.0-r2': ['CVE-2018-7540 XSA-252']}}" - in caplog.text + in logger.getvalue() ) -def test_load_advisories_package_without_secfixes(caplog): +def test_load_advisories_package_without_secfixes(): + logger = TestLogger() package = { "name": "xen", "secfixes": {"4.10.0-r1": []}, } - list(load_advisories(package, "v3.11", "main", archs=[], url="")) - assert "No fixed vulnerabilities in version '4.10.0-r1'" in caplog.text + list(load_advisories(package, "v3.11", "main", archs=[], url="", logger=logger.write)) + assert "No fixed vulnerabilities in version '4.10.0-r1'" in logger.getvalue() @pytest.mark.parametrize( @@ -542,13 +560,14 @@ def test_load_advisories_package_without_secfixes(caplog): "4.10-1-r1", ], ) -def test_load_advisories_package_with_invalid_alpine_version(test_case, caplog): +def test_load_advisories_package_with_invalid_alpine_version(test_case): + logger = TestLogger() package = { "name": "xen", "secfixes": {f"{test_case}": ["XSA-248"]}, } - list(load_advisories(package, "v3.11", "main", archs=[], url="")) + list(load_advisories(package, "v3.11", "main", archs=[], url="", logger=logger.write)) assert ( f"{test_case!r} is not a valid AlpineVersion InvalidVersion(\"{test_case!r} is not a valid \")" - in caplog.text + in logger.getvalue() ) From 1b834c3e0faabfa38c686bce17e75111ec3eb095 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 10 Jan 2025 14:34:20 +0530 Subject: [PATCH 051/545] Add data migration for old alpine advisory Signed-off-by: Keshav Priyadarshi --- .../0086_update_alpine_advisory_created_by.py | 36 ++++++++++++++++ 
vulnerabilities/tests/test_data_migrations.py | 42 +++++++++++++++++++ 2 files changed, 78 insertions(+) create mode 100644 vulnerabilities/migrations/0086_update_alpine_advisory_created_by.py diff --git a/vulnerabilities/migrations/0086_update_alpine_advisory_created_by.py b/vulnerabilities/migrations/0086_update_alpine_advisory_created_by.py new file mode 100644 index 000000000..b7e5394b1 --- /dev/null +++ b/vulnerabilities/migrations/0086_update_alpine_advisory_created_by.py @@ -0,0 +1,36 @@ + +from django.db import migrations + +""" +Update the created_by field on Advisory from the old qualified_name +to the new pipeline_id. +""" + + +def update_created_by(apps, schema_editor): + from vulnerabilities.pipelines.alpine_linux_importer import AlpineLinuxImporterPipeline + + Advisory = apps.get_model("vulnerabilities", "Advisory") + Advisory.objects.filter(created_by="vulnerabilities.importers.alpine_linux.AlpineImporter").update( + created_by=AlpineLinuxImporterPipeline.pipeline_id + ) + + +def reverse_update_created_by(apps, schema_editor): + from vulnerabilities.pipelines.alpine_linux_importer import AlpineLinuxImporterPipeline + + Advisory = apps.get_model("vulnerabilities", "Advisory") + Advisory.objects.filter(created_by=AlpineLinuxImporterPipeline.pipeline_id).update( + created_by="vulnerabilities.importers.alpine_linux.AlpineImporter" + ) + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0085_alter_package_is_ghost_alter_package_version_rank_and_more"), + ] + + operations = [ + migrations.RunPython(update_created_by, reverse_code=reverse_update_created_by), + ] \ No newline at end of file diff --git a/vulnerabilities/tests/test_data_migrations.py b/vulnerabilities/tests/test_data_migrations.py index 046c86ce5..02cb1d489 100644 --- a/vulnerabilities/tests/test_data_migrations.py +++ b/vulnerabilities/tests/test_data_migrations.py @@ -880,3 +880,45 @@ def test_update_pysec_created_by_field(self): assert 
adv.filter(created_by="vulnerabilities.importers.pysec.PyPIImporter").count() == 0 assert adv.filter(created_by="pysec_importer").count() == 1 + + +class TestUpdateAlpineAdvisoryCreatedByField(TestMigrations): + app_name = "vulnerabilities" + migrate_from = "0085_alter_package_is_ghost_alter_package_version_rank_and_more" + migrate_to = "0086_update_alpine_advisory_created_by" + + advisory_data1 = AdvisoryData( + aliases=["CVE-2020-13371337"], + summary="vulnerability description here", + affected_packages=[ + AffectedPackage( + package=PackageURL(type="pypi", name="foobar"), + affected_version_range=VersionRange.from_string("vers:pypi/>=1.0.0|<=2.0.0"), + ) + ], + references=[Reference(url="https://example.com/with/more/info/CVE-2020-13371337")], + date_published=timezone.now(), + url="https://test.com", + ) + + def setUpBeforeMigration(self, apps): + Advisory = apps.get_model("vulnerabilities", "Advisory") + adv1 = Advisory.objects.create( + aliases=self.advisory_data1.aliases, + summary=self.advisory_data1.summary, + affected_packages=[pkg.to_dict() for pkg in self.advisory_data1.affected_packages], + references=[ref.to_dict() for ref in self.advisory_data1.references], + url=self.advisory_data1.url, + created_by="vulnerabilities.importers.alpine_linux.AlpineImporter", + date_collected=timezone.now(), + ) + + def test_update_pysec_created_by_field(self): + Advisory = apps.get_model("vulnerabilities", "Advisory") + adv = Advisory.objects.all() + + assert ( + adv.filter(created_by="vulnerabilities.importers.alpine_linux.AlpineImporter").count() + == 0 + ) + assert adv.filter(created_by="alpine_linux_importer").count() == 1 From 11c417af82204d7726811b154870365cf7adc01e Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 10 Jan 2025 16:15:22 +0530 Subject: [PATCH 052/545] Resolve migration conflict Signed-off-by: Keshav Priyadarshi --- ...reated_by.py => 0087_update_alpine_advisory_created_by.py} | 2 +- vulnerabilities/tests/test_data_migrations.py | 4 ++-- 
2 files changed, 3 insertions(+), 3 deletions(-) rename vulnerabilities/migrations/{0086_update_alpine_advisory_created_by.py => 0087_update_alpine_advisory_created_by.py} (91%) diff --git a/vulnerabilities/migrations/0086_update_alpine_advisory_created_by.py b/vulnerabilities/migrations/0087_update_alpine_advisory_created_by.py similarity index 91% rename from vulnerabilities/migrations/0086_update_alpine_advisory_created_by.py rename to vulnerabilities/migrations/0087_update_alpine_advisory_created_by.py index b7e5394b1..9b162b09d 100644 --- a/vulnerabilities/migrations/0086_update_alpine_advisory_created_by.py +++ b/vulnerabilities/migrations/0087_update_alpine_advisory_created_by.py @@ -28,7 +28,7 @@ def reverse_update_created_by(apps, schema_editor): class Migration(migrations.Migration): dependencies = [ - ("vulnerabilities", "0085_alter_package_is_ghost_alter_package_version_rank_and_more"), + ("vulnerabilities", "0086_codefix"), ] operations = [ diff --git a/vulnerabilities/tests/test_data_migrations.py b/vulnerabilities/tests/test_data_migrations.py index 02cb1d489..38bf9417f 100644 --- a/vulnerabilities/tests/test_data_migrations.py +++ b/vulnerabilities/tests/test_data_migrations.py @@ -884,8 +884,8 @@ def test_update_pysec_created_by_field(self): class TestUpdateAlpineAdvisoryCreatedByField(TestMigrations): app_name = "vulnerabilities" - migrate_from = "0085_alter_package_is_ghost_alter_package_version_rank_and_more" - migrate_to = "0086_update_alpine_advisory_created_by" + migrate_from = "0086_codefix" + migrate_to = "0087_update_alpine_advisory_created_by" advisory_data1 = AdvisoryData( aliases=["CVE-2020-13371337"], From fd9cf47d44440f12c6b6103d43a9f9c5919e36be Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Sun, 12 Jan 2025 21:40:09 +0530 Subject: [PATCH 053/545] Allow CVSS3.1 Severities in NVD Signed-off-by: Tushar Goel --- vulnerabilities/pipelines/nvd_importer.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git 
a/vulnerabilities/pipelines/nvd_importer.py b/vulnerabilities/pipelines/nvd_importer.py index bd6f33cf9..645b9f442 100644 --- a/vulnerabilities/pipelines/nvd_importer.py +++ b/vulnerabilities/pipelines/nvd_importer.py @@ -210,8 +210,14 @@ def severities(self): base_metric_v3 = impact.get("baseMetricV3") or {} if base_metric_v3: cvss_v3 = get_item(base_metric_v3, "cvssV3") + version = cvss_v3.get("version") + system = None + if version == "3.1": + system = severity_systems.CVSSV31 + else: + system = severity_systems.CVSSV3 vs = VulnerabilitySeverity( - system=severity_systems.CVSSV3, + system=system, value=str(cvss_v3.get("baseScore") or ""), scoring_elements=str(cvss_v3.get("vectorString") or ""), ) From c80c7a3944c87d463d90171c0f92c4bcdb354a8e Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Mon, 13 Jan 2025 20:04:52 +0530 Subject: [PATCH 054/545] Add pipeline to add CVSSv3.1 score for CVEs Signed-off-by: Tushar Goel --- vulnerabilities/improvers/__init__.py | 2 + vulnerabilities/models.py | 2 + .../pipelines/add_cvss31_to_CVEs.py | 105 ++++++++++++++++++ vulnerabilities/tests/test_add_cvsssv31.py | 56 ++++++++++ 4 files changed, 165 insertions(+) create mode 100644 vulnerabilities/pipelines/add_cvss31_to_CVEs.py create mode 100644 vulnerabilities/tests/test_add_cvsssv31.py diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index 44a65df47..d7bb3c288 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -17,6 +17,7 @@ from vulnerabilities.pipelines import enhance_with_kev from vulnerabilities.pipelines import enhance_with_metasploit from vulnerabilities.pipelines import flag_ghost_packages +from vulnerabilities.pipelines import add_cvss31_to_CVEs IMPROVERS_REGISTRY = [ valid_versions.GitHubBasicImprover, @@ -43,6 +44,7 @@ compute_package_risk.ComputePackageRiskPipeline, compute_package_version_rank.ComputeVersionRankPipeline, collect_commits.CollectFixCommitsPipeline, + 
add_cvss31_to_CVEs.CVEAdvisoryMappingPipeline, ] IMPROVERS_REGISTRY = { diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 1a58ec4dc..21b1129a2 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -189,6 +189,8 @@ class VulnerabilitySeverity(models.Model): blank=True, null=True, help_text="UTC Date of publication of the vulnerability severity" ) + objects = BaseQuerySet.as_manager() + class Meta: ordering = ["url", "scoring_system", "value"] diff --git a/vulnerabilities/pipelines/add_cvss31_to_CVEs.py b/vulnerabilities/pipelines/add_cvss31_to_CVEs.py new file mode 100644 index 000000000..acda42b52 --- /dev/null +++ b/vulnerabilities/pipelines/add_cvss31_to_CVEs.py @@ -0,0 +1,105 @@ +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# +import re + +from aboutcode.pipeline import LoopProgress +from django.db import transaction + +from vulnerabilities import severity_systems +from vulnerabilities.models import Advisory +from vulnerabilities.models import VulnerabilitySeverity +from vulnerabilities.pipelines import VulnerableCodePipeline + + +class CVEAdvisoryMappingPipeline(VulnerableCodePipeline): + """ + Pipeline to map CVEs from VulnerabilitySeverity to corresponding Advisories with CVSS3.1 scores. 
+ """ + + pipeline_id = "add_cvssv3.1_to_CVEs" + + @classmethod + def steps(cls): + return (cls.process_cve_advisory_mapping,) + + def process_cve_advisory_mapping(self): + nvd_severities = ( + VulnerabilitySeverity.objects.filter( + url__startswith="https://nvd.nist.gov/vuln/detail/CVE-", scoring_system="cvssv3" + ) + .prefetch_related("vulnerabilities") + .distinct() + ) + + self.log(f"Processing {nvd_severities.count():,d} CVE severity records") + + progress = LoopProgress( + total_iterations=nvd_severities.count(), + logger=self.log, + progress_step=5, + ) + + batch_size = 1000 + results = [] + + for severity in progress.iter(nvd_severities.paginated(per_page=batch_size)): + print(severity.url) + cve_pattern = re.compile(r"(CVE-\d{4}-\d{4,7})").search + cve_match = cve_pattern(severity.url) + if cve_match: + cve_id = cve_match.group() + else: + self.log(f"Could not find CVE ID in URL: {severity.url}") + continue + + matching_advisories = Advisory.objects.filter( + aliases=[cve_id], + created_by="nvd_importer", + ) + + for advisory in matching_advisories: + for reference in advisory.references: + for sev in reference.get("severities", []): + if sev.get("system") == "cvssv3.1": + results.append( + { + "cve_id": cve_id, + "cvss31_score": sev.get("value"), + "cvss31_vector": sev.get("scoring_elements"), + "vulnerabilities": severity.vulnerabilities.all(), + } + ) + + if results: + print(results) + self._process_batch(results) + + self.log(f"Completed processing CVE to Advisory mappings") + + def _process_batch(self, results): + """ + Process a batch of results. Transactions are used to ensure data consistency. 
+ """ + self.log(f"Processing batch of {len(results)} mappings") + + with transaction.atomic(): + for result in results: + self.log( + f"CVE: {result['cve_id']}, " + f"CVSS3.1: {result['cvss31_score']}, " + f"Vector: {result['cvss31_vector']}" + ) + + for vulnerability in result["vulnerabilities"]: + vuln_severity, _ = VulnerabilitySeverity.objects.update_or_create( + scoring_system=severity_systems.CVSSV31.identifier, + url=f"https://nvd.nist.gov/vuln/detail/{result['cve_id']}", + value=result["cvss31_score"], + scoring_elements=result["cvss31_vector"], + ) + vulnerability.severities.add(vuln_severity) diff --git a/vulnerabilities/tests/test_add_cvsssv31.py b/vulnerabilities/tests/test_add_cvsssv31.py new file mode 100644 index 000000000..c79b51879 --- /dev/null +++ b/vulnerabilities/tests/test_add_cvsssv31.py @@ -0,0 +1,56 @@ +import unittest +from unittest.mock import Mock +from unittest.mock import patch + +from django.test import TestCase + +from vulnerabilities.models import Advisory +from vulnerabilities.models import Alias +from vulnerabilities.models import Vulnerability +from vulnerabilities.models import VulnerabilitySeverity +from vulnerabilities.pipelines.add_cvss31_to_CVEs import CVEAdvisoryMappingPipeline +from vulnerabilities.severity_systems import CVSSV3 +from vulnerabilities.severity_systems import CVSSV31 + + +class TestCVEAdvisoryMappingPipeline(TestCase): + def setUp(self): + self.pipeline = CVEAdvisoryMappingPipeline() + Advisory.objects.create( + created_by="nvd_importer", + aliases=["CVE-2024-1234"], + references=[ + { + "severities": [ + { + "system": "cvssv3.1", + "value": "7.5", + "scoring_elements": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N", + } + ], + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-1234", + } + ], + date_collected="2024-09-27T19:38:00Z", + ) + vuln = Vulnerability.objects.create(vulnerability_id="CVE-2024-1234") + sev = VulnerabilitySeverity.objects.create( + scoring_system=CVSSV3.identifier, + 
url="https://nvd.nist.gov/vuln/detail/CVE-2024-1234", + value="7.5", + scoring_elements="CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N", + ) + vuln.severities.add(sev) + + def test_process_cve_advisory_mapping_single_record(self): + self.pipeline.process_cve_advisory_mapping() + self.assertEqual(VulnerabilitySeverity.objects.count(), 2) + # check if severity with cvssv3.1 is created + sev = VulnerabilitySeverity.objects.get(scoring_system=CVSSV31.identifier) + self.assertEqual(sev.url, "https://nvd.nist.gov/vuln/detail/CVE-2024-1234") + self.assertEqual(sev.value, "7.5") + self.assertEqual(sev.scoring_elements, "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N") + # check if severity is added to existing vulnerability + vuln = Vulnerability.objects.get(vulnerability_id="CVE-2024-1234") + self.assertEqual(vuln.severities.count(), 2) + self.assertIn(sev, vuln.severities.all()) From 5f2d228323dda854a387bdf5408e2c16d0d54a88 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Mon, 13 Jan 2025 20:05:02 +0530 Subject: [PATCH 055/545] Add pipeline to add CVSSv3.1 score for CVEs Signed-off-by: Tushar Goel --- vulnerabilities/improvers/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index d7bb3c288..9b11c7920 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -10,6 +10,7 @@ from vulnerabilities.improvers import valid_versions from vulnerabilities.improvers import vulnerability_status from vulnerabilities.pipelines import VulnerableCodePipeline +from vulnerabilities.pipelines import add_cvss31_to_CVEs from vulnerabilities.pipelines import collect_commits from vulnerabilities.pipelines import compute_package_risk from vulnerabilities.pipelines import compute_package_version_rank @@ -17,7 +18,6 @@ from vulnerabilities.pipelines import enhance_with_kev from vulnerabilities.pipelines import enhance_with_metasploit from 
vulnerabilities.pipelines import flag_ghost_packages -from vulnerabilities.pipelines import add_cvss31_to_CVEs IMPROVERS_REGISTRY = [ valid_versions.GitHubBasicImprover, From 8fe43f7623d6b86e75b632669ee137d095222aab Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 13 Jan 2025 11:22:21 +0530 Subject: [PATCH 056/545] Fix secret generation on mac Signed-off-by: Keshav Priyadarshi --- Makefile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Makefile b/Makefile index 067cb419f..47ce9fcd7 100644 --- a/Makefile +++ b/Makefile @@ -42,6 +42,10 @@ else SUDO_POSTGRES= endif +ifeq ($(UNAME), Darwin) + GET_SECRET_KEY=`head /dev/urandom | base64 | head -c50` +endif + virtualenv: @echo "-> Bootstrap the virtualenv with PYTHON_EXE=${PYTHON_EXE}" @${PYTHON_EXE} ${VIRTUALENV_PYZ} --never-download --no-periodic-update ${VENV} From cc6d3006847514aee950b2462d9013714c0dc4fe Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 13 Jan 2025 17:18:34 +0530 Subject: [PATCH 057/545] Add migration to fix alpine PURLs Signed-off-by: Keshav Priyadarshi --- .../migrations/0088_fix_alpine_purl_type.py | 103 ++++++++++++++++++ 1 file changed, 103 insertions(+) create mode 100644 vulnerabilities/migrations/0088_fix_alpine_purl_type.py diff --git a/vulnerabilities/migrations/0088_fix_alpine_purl_type.py b/vulnerabilities/migrations/0088_fix_alpine_purl_type.py new file mode 100644 index 000000000..29339cfd4 --- /dev/null +++ b/vulnerabilities/migrations/0088_fix_alpine_purl_type.py @@ -0,0 +1,103 @@ +from datetime import datetime +from datetime import timezone + +from aboutcode.pipeline import LoopProgress +from django.db import migrations +from packageurl import PackageURL + +CHUNK_SIZE = 50000 +BATCH_SIZE = 500 + + +class Migration(migrations.Migration): + def fix_alpine_purl_type(apps, schema_editor): + """Use proper apk package type for Alpine""" + + Package = apps.get_model("vulnerabilities", "Package") + batch = [] + alpine_packages_query = 
Package.objects.filter(type="alpine") + + log(f"\nFixing PURL for {alpine_packages_query.count():,d} alpine packages") + progress = LoopProgress( + total_iterations=alpine_packages_query.count(), + progress_step=10, + logger=log, + ) + for package in progress.iter(alpine_packages_query.iterator(chunk_size=CHUNK_SIZE)): + package.type = "apk" + package.namespace = "alpine" + + package.package_url = update_alpine_purl(package.package_url, "apk", "alpine") + package.plain_package_url = update_alpine_purl( + package.plain_package_url, "apk", "alpine" + ) + + batch.append(package) + if len(batch) >= BATCH_SIZE: + bulk_update_package(Package, batch) + batch.clear() + + bulk_update_package(Package, batch) + + def reverse_fix_alpine_purl_type(apps, schema_editor): + Package = apps.get_model("vulnerabilities", "Package") + batch = [] + alpine_packages_query = Package.objects.filter(type="apk", namespace="alpine") + + log(f"\nREVERSE: Fix for {alpine_packages_query.count():,d} alpine packages") + progress = LoopProgress( + total_iterations=alpine_packages_query.count(), + progress_step=10, + logger=log, + ) + for package in progress.iter(alpine_packages_query.iterator(chunk_size=CHUNK_SIZE)): + package.type = "alpine" + package.namespace = "" + + package.package_url = update_alpine_purl(package.package_url, "alpine", "") + package.plain_package_url = update_alpine_purl(package.plain_package_url, "alpine", "") + + batch.append(package) + if len(batch) >= BATCH_SIZE: + bulk_update_package(Package, batch) + batch.clear() + + bulk_update_package(Package, batch) + + dependencies = [ + ("vulnerabilities", "0087_update_alpine_advisory_created_by"), + ] + + operations = [ + migrations.RunPython( + code=fix_alpine_purl_type, + reverse_code=reverse_fix_alpine_purl_type, + ), + ] + + +def bulk_update_package(package, batch): + if batch: + package.objects.bulk_update( + objs=batch, + fields=[ + "type", + "namespace", + "package_url", + "plain_package_url", + ], + ) + + +def 
update_alpine_purl(purl, purl_type, purl_namespace): + package_url = PackageURL.from_string(purl).to_dict() + package_url["type"] = purl_type + package_url["namespace"] = purl_namespace + return str(PackageURL(**package_url)) + + +def log(message): + now_local = datetime.now(timezone.utc).astimezone() + timestamp = now_local.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + message = f"{timestamp} {message}" + print(message) From 8a55549b7f440c70363a2942c554e5febf5b8488 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 13 Jan 2025 17:50:19 +0530 Subject: [PATCH 058/545] Add test for Alpine data migration Signed-off-by: Keshav Priyadarshi --- vulnerabilities/tests/test_data_migrations.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/vulnerabilities/tests/test_data_migrations.py b/vulnerabilities/tests/test_data_migrations.py index 38bf9417f..55bbb71ef 100644 --- a/vulnerabilities/tests/test_data_migrations.py +++ b/vulnerabilities/tests/test_data_migrations.py @@ -19,6 +19,7 @@ from vulnerabilities.importer import AdvisoryData from vulnerabilities.importer import AffectedPackage from vulnerabilities.importer import Reference +from vulnerabilities.utils import purl_to_dict class TestMigrations(TestCase): @@ -922,3 +923,32 @@ def test_update_pysec_created_by_field(self): == 0 ) assert adv.filter(created_by="alpine_linux_importer").count() == 1 + + +class TestFixAlpinePURLCreatedByField(TestMigrations): + app_name = "vulnerabilities" + migrate_from = "0087_update_alpine_advisory_created_by" + migrate_to = "0088_fix_alpine_purl_type" + + def setUpBeforeMigration(self, apps): + Package = apps.get_model("vulnerabilities", "Package") + purl = str( + PackageURL( + type="alpine", + namespace="", + name="curl", + version="7.83.0-r0", + qualifiers="arch=x86", + ) + ) + package1 = Package.objects.create( + **purl_to_dict(purl=purl), package_url=purl, plain_package_url=purl + ) + + def test_fix_alpine_purl(self): + Package = 
apps.get_model("vulnerabilities", "Package") + package = Package.objects.all() + print(package) + + assert package.filter(type="alpine").count() == 0 + assert package.filter(type="apk").count() == 1 From 3cb9d3f9b33e5812472d0972867ffcb97e1af2c2 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 13 Jan 2025 21:32:05 +0530 Subject: [PATCH 059/545] Use proper purl type for Alpine in pipeline, models, and views Signed-off-by: Keshav Priyadarshi --- vulnerabilities/models.py | 4 +- .../pipelines/alpine_linux_importer.py | 6 +- .../test_alpine_linux_importer_pipeline.py | 112 +++++++++--------- .../default_improver/alpine-expected.json | 28 ++--- .../default_improver/alpine-input.json | 28 ++--- vulnerabilities/tests/test_models.py | 12 +- vulnerabilities/tests/test_view.py | 5 +- vulnerabilities/views.py | 2 +- 8 files changed, 95 insertions(+), 102 deletions(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 21b1129a2..4db674e3e 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -10,10 +10,8 @@ import hashlib import json import logging -import typing from contextlib import suppress from functools import cached_property -from typing import Optional from typing import Union from cwe2.database import Database @@ -56,7 +54,7 @@ models.CharField.register_lookup(Trim) # patch univers for missing entry -RANGE_CLASS_BY_SCHEMES["alpine"] = AlpineLinuxVersionRange +RANGE_CLASS_BY_SCHEMES["apk"] = AlpineLinuxVersionRange class BaseQuerySet(models.QuerySet): diff --git a/vulnerabilities/pipelines/alpine_linux_importer.py b/vulnerabilities/pipelines/alpine_linux_importer.py index d29f9bc9b..28736e507 100644 --- a/vulnerabilities/pipelines/alpine_linux_importer.py +++ b/vulnerabilities/pipelines/alpine_linux_importer.py @@ -254,7 +254,8 @@ def load_advisories( affected_packages.append( AffectedPackage( package=PackageURL( - type="alpine", + type="apk", + namespace="alpine", name=pkg_infos["name"], qualifiers=qualifiers, ), @@ 
-266,7 +267,8 @@ def load_advisories( affected_packages.append( AffectedPackage( package=PackageURL( - type="alpine", + type="apk", + namespace="alpine", name=pkg_infos["name"], qualifiers=qualifiers, ), diff --git a/vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py b/vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py index 386f239d8..49182b287 100644 --- a/vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py +++ b/vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py @@ -36,8 +36,8 @@ def test_process_record(): affected_packages=[ AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={ @@ -52,8 +52,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={"arch": "armhf", "distroversion": "v3.11", "reponame": "main"}, @@ -64,8 +64,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={"arch": "armv7", "distroversion": "v3.11", "reponame": "main"}, @@ -76,8 +76,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={ @@ -92,8 +92,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={"arch": "s390x", "distroversion": "v3.11", "reponame": "main"}, @@ -104,8 +104,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={"arch": "x86", "distroversion": "v3.11", "reponame": "main"}, @@ -116,8 
+116,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={"arch": "x86_64", "distroversion": "v3.11", "reponame": "main"}, @@ -143,8 +143,8 @@ def test_process_record(): affected_packages=[ AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={ @@ -159,8 +159,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={"arch": "armhf", "distroversion": "v3.11", "reponame": "main"}, @@ -171,8 +171,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={"arch": "armv7", "distroversion": "v3.11", "reponame": "main"}, @@ -183,8 +183,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={ @@ -199,8 +199,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={"arch": "s390x", "distroversion": "v3.11", "reponame": "main"}, @@ -211,8 +211,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={"arch": "x86", "distroversion": "v3.11", "reponame": "main"}, @@ -223,8 +223,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="xen", version=None, qualifiers={"arch": "x86_64", "distroversion": "v3.11", "reponame": "main"}, @@ -250,8 +250,8 @@ def 
test_process_record(): affected_packages=[ AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={ @@ -266,8 +266,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={"arch": "armhf", "distroversion": "v3.11", "reponame": "main"}, @@ -278,8 +278,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={"arch": "armv7", "distroversion": "v3.11", "reponame": "main"}, @@ -290,8 +290,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={ @@ -306,8 +306,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={"arch": "s390x", "distroversion": "v3.11", "reponame": "main"}, @@ -318,8 +318,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={"arch": "x86", "distroversion": "v3.11", "reponame": "main"}, @@ -330,8 +330,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={"arch": "x86_64", "distroversion": "v3.11", "reponame": "main"}, @@ -351,8 +351,8 @@ def test_process_record(): affected_packages=[ AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={ @@ -367,8 +367,8 @@ def test_process_record(): ), 
AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={"arch": "armhf", "distroversion": "v3.11", "reponame": "main"}, @@ -379,8 +379,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={"arch": "armv7", "distroversion": "v3.11", "reponame": "main"}, @@ -391,8 +391,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={ @@ -407,8 +407,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={"arch": "s390x", "distroversion": "v3.11", "reponame": "main"}, @@ -419,8 +419,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={"arch": "x86", "distroversion": "v3.11", "reponame": "main"}, @@ -431,8 +431,8 @@ def test_process_record(): ), AffectedPackage( package=PackageURL( - type="alpine", - namespace=None, + type="apk", + namespace="alpine", name="apk-tools", version=None, qualifiers={"arch": "x86_64", "distroversion": "v3.11", "reponame": "main"}, diff --git a/vulnerabilities/tests/test_data/default_improver/alpine-expected.json b/vulnerabilities/tests/test_data/default_improver/alpine-expected.json index 5d8a84930..f9d3caf16 100644 --- a/vulnerabilities/tests/test_data/default_improver/alpine-expected.json +++ b/vulnerabilities/tests/test_data/default_improver/alpine-expected.json @@ -6,8 +6,8 @@ "summary": null, "affected_purls": [], "fixed_purl": { - "type": "alpine", - "namespace": "", + "type": "apk", + "namespace": "alpine", "name": "xen", 
"version": "4.10.0-r1", "qualifiers": "arch=aarch64&distroversion=v3.11&reponame=main", @@ -30,8 +30,8 @@ "summary": null, "affected_purls": [], "fixed_purl": { - "type": "alpine", - "namespace": "", + "type": "apk", + "namespace": "alpine", "name": "xen", "version": "4.10.0-r1", "qualifiers": "arch=armhf&distroversion=v3.11&reponame=main", @@ -54,8 +54,8 @@ "summary": null, "affected_purls": [], "fixed_purl": { - "type": "alpine", - "namespace": "", + "type": "apk", + "namespace": "alpine", "name": "xen", "version": "4.10.0-r1", "qualifiers": "arch=armv7&distroversion=v3.11&reponame=main", @@ -78,8 +78,8 @@ "summary": null, "affected_purls": [], "fixed_purl": { - "type": "alpine", - "namespace": "", + "type": "apk", + "namespace": "alpine", "name": "xen", "version": "4.10.0-r1", "qualifiers": "arch=ppc64le&distroversion=v3.11&reponame=main", @@ -102,8 +102,8 @@ "summary": null, "affected_purls": [], "fixed_purl": { - "type": "alpine", - "namespace": "", + "type": "apk", + "namespace": "alpine", "name": "xen", "version": "4.10.0-r1", "qualifiers": "arch=s390x&distroversion=v3.11&reponame=main", @@ -126,8 +126,8 @@ "summary": null, "affected_purls": [], "fixed_purl": { - "type": "alpine", - "namespace": "", + "type": "apk", + "namespace": "alpine", "name": "xen", "version": "4.10.0-r1", "qualifiers": "arch=x86&distroversion=v3.11&reponame=main", @@ -150,8 +150,8 @@ "summary": null, "affected_purls": [], "fixed_purl": { - "type": "alpine", - "namespace": "", + "type": "apk", + "namespace": "alpine", "name": "xen", "version": "4.10.0-r1", "qualifiers": "arch=x86_64&distroversion=v3.11&reponame=main", diff --git a/vulnerabilities/tests/test_data/default_improver/alpine-input.json b/vulnerabilities/tests/test_data/default_improver/alpine-input.json index 9ff37ecb8..f2143b32b 100644 --- a/vulnerabilities/tests/test_data/default_improver/alpine-input.json +++ b/vulnerabilities/tests/test_data/default_improver/alpine-input.json @@ -4,8 +4,8 @@ "affected_packages": [ { 
"package": { - "type": "alpine", - "namespace": null, + "type": "apk", + "namespace": "alpine", "name": "xen", "version": null, "qualifiers": { @@ -20,8 +20,8 @@ }, { "package": { - "type": "alpine", - "namespace": null, + "type": "apk", + "namespace": "alpine", "name": "xen", "version": null, "qualifiers": { @@ -36,8 +36,8 @@ }, { "package": { - "type": "alpine", - "namespace": null, + "type": "apk", + "namespace": "alpine", "name": "xen", "version": null, "qualifiers": { @@ -52,8 +52,8 @@ }, { "package": { - "type": "alpine", - "namespace": null, + "type": "apk", + "namespace": "alpine", "name": "xen", "version": null, "qualifiers": { @@ -68,8 +68,8 @@ }, { "package": { - "type": "alpine", - "namespace": null, + "type": "apk", + "namespace": "alpine", "name": "xen", "version": null, "qualifiers": { @@ -84,8 +84,8 @@ }, { "package": { - "type": "alpine", - "namespace": null, + "type": "apk", + "namespace": "alpine", "name": "xen", "version": null, "qualifiers": { @@ -100,8 +100,8 @@ }, { "package": { - "type": "alpine", - "namespace": null, + "type": "apk", + "namespace": "alpine", "name": "xen", "version": null, "qualifiers": { diff --git a/vulnerabilities/tests/test_models.py b/vulnerabilities/tests/test_models.py index 014754786..a5f8e251c 100644 --- a/vulnerabilities/tests/test_models.py +++ b/vulnerabilities/tests/test_models.py @@ -8,25 +8,17 @@ # import urllib.parse -from datetime import datetime from unittest import TestCase -from unittest import mock import pytest -from django.db import transaction -from django.db.models.query import QuerySet -from django.db.utils import IntegrityError -from freezegun import freeze_time from packageurl import PackageURL from univers import versions from univers.version_range import RANGE_CLASS_BY_SCHEMES -from univers.version_range import AlpineLinuxVersionRange from vulnerabilities import models from vulnerabilities.models import Alias from vulnerabilities.models import Package from vulnerabilities.models import 
Vulnerability -from vulnerabilities.models import VulnerabilityQuerySet class TestVulnerabilityModel(TestCase): @@ -397,7 +389,9 @@ def test_univers_version_class(self): pypi_package_version = RANGE_CLASS_BY_SCHEMES[pypi_package.type].version_class assert pypi_package_version == versions.PypiVersion - alpine_package = models.Package.objects.create(type="alpine", name="lxml", version="0.9") + alpine_package = models.Package.objects.create( + type="apk", namespace="alpine", name="lxml", version="0.9" + ) alpine_version = RANGE_CLASS_BY_SCHEMES[alpine_package.type].version_class assert alpine_version == versions.AlpineLinuxVersion diff --git a/vulnerabilities/tests/test_view.py b/vulnerabilities/tests/test_view.py index fd62e94a1..98a555294 100644 --- a/vulnerabilities/tests/test_view.py +++ b/vulnerabilities/tests/test_view.py @@ -16,7 +16,6 @@ from packageurl import PackageURL from univers import versions -from vulnerabilities import models from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Alias from vulnerabilities.models import FixingPackageRelatedVulnerability @@ -249,8 +248,8 @@ class TestCustomFilters: "pkg%3Arpm/redhat/katello-client-bootstrap%401.1.0-2%3Farch%3Del6sat", ), ( - "pkg:alpine/nginx@1.10.3-r1?arch=armhf&distroversion=v3.5&reponame=main", - "pkg%3Aalpine/nginx%401.10.3-r1%3Farch%3Darmhf%26distroversion%3Dv3.5%26reponame%3Dmain", + "pkg:apk/alpine/nginx@1.10.3-r1?arch=armhf&distroversion=v3.5&reponame=main", + "pkg%3Aapk/alpine/nginx%401.10.3-r1%3Farch%3Darmhf%26distroversion%3Dv3.5%26reponame%3Dmain", ), ("pkg:nginx/nginx@0.9.0?os=windows", "pkg%3Anginx/nginx%400.9.0%3Fos%3Dwindows"), ( diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index fd57acea5..7d0911c64 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -54,7 +54,7 @@ def purl_sort_key(purl: models.Package): def get_purl_version_class(purl: models.Package): - RANGE_CLASS_BY_SCHEMES["alpine"] = 
AlpineLinuxVersionRange + RANGE_CLASS_BY_SCHEMES["apk"] = AlpineLinuxVersionRange purl_version_class = None check_version_class = RANGE_CLASS_BY_SCHEMES.get(purl.type, None) if check_version_class: From a342879c4878b750a43ed8f7c624b6dd6b3438cf Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 13 Jan 2025 14:30:32 +0530 Subject: [PATCH 060/545] Add description and link to latest release in UI Signed-off-by: Keshav Priyadarshi --- vulnerabilities/templates/index.html | 35 ++++++++++++++++++++++++---- vulnerabilities/views.py | 3 ++- 2 files changed, 32 insertions(+), 6 deletions(-) diff --git a/vulnerabilities/templates/index.html b/vulnerabilities/templates/index.html index 8321efd83..eaa7a55b9 100644 --- a/vulnerabilities/templates/index.html +++ b/vulnerabilities/templates/index.html @@ -6,8 +6,33 @@ {% endblock %} {% block content %} -
- {% include "package_search_box.html" %} - {% include "vulnerability_search_box.html" %} -
-{% endblock %} +
+
+
+

+ VulnerableCode aggregates software + vulnerabilities from multiple public advisory sources + and presents their details along with their affected + packages and fixed-by packages identified by + Package URLs (PURLs). +

+

+ What's new in this Release: + + Check out latest updates here! + +

+
+
+
+ {% include "vulnerability_search_box.html" %} +
+
+
+
+ {% include "package_search_box.html" %} +
+
+
+
+{% endblock %} \ No newline at end of file diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index fd57acea5..93cff0628 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -7,7 +7,6 @@ # See https://aboutcode.org for more information about nexB OSS projects. # import logging -from datetime import datetime from cvss.exceptions import CVSS2MalformedError from cvss.exceptions import CVSS3MalformedError @@ -34,6 +33,7 @@ from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS from vulnerabilities.utils import get_severity_range +from vulnerablecode import __version__ as VULNERABLECODE_VERSION from vulnerablecode.settings import env PAGE_SIZE = 20 @@ -256,6 +256,7 @@ def get(self, request): context = { "vulnerability_search_form": VulnerabilitySearchForm(request_query), "package_search_form": PackageSearchForm(request_query), + "release_url": f"https://github.com/aboutcode-org/vulnerablecode/releases/tag/v{VULNERABLECODE_VERSION}", } return render(request=request, template_name=self.template_name, context=context) From 656fd7d1969aff5bd95144bf68bd769cdbf6e0c2 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 13 Jan 2025 15:22:24 +0530 Subject: [PATCH 061/545] Move description to down Signed-off-by: Keshav Priyadarshi --- vulnerabilities/templates/index.html | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/vulnerabilities/templates/index.html b/vulnerabilities/templates/index.html index eaa7a55b9..cdc9212ed 100644 --- a/vulnerabilities/templates/index.html +++ b/vulnerabilities/templates/index.html @@ -8,8 +8,18 @@ {% block content %}
+
+
+ {% include "vulnerability_search_box.html" %} +
+
+
+
+ {% include "package_search_box.html" %} +
+
-

+

VulnerableCode aggregates software vulnerabilities from multiple public advisory sources and presents their details along with their affected @@ -23,16 +33,6 @@

-
-
- {% include "vulnerability_search_box.html" %} -
-
-
-
- {% include "package_search_box.html" %} -
-
{% endblock %} \ No newline at end of file From fabe0358056cee9cd94c195fa7613f7708a3a3a8 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Thu, 23 Jan 2025 20:00:20 +0530 Subject: [PATCH 062/545] Optimize vulnerabilities view (#1728) * Optimize vulnerabilities view Signed-off-by: Tushar Goel * Fix formatting Signed-off-by: Tushar Goel * Fix tests Signed-off-by: Tushar Goel * Fix views Signed-off-by: Tushar Goel * Move severities to a different tab Signed-off-by: Tushar Goel * Separate views Signed-off-by: Tushar Goel * Refactor Views Signed-off-by: Tushar Goel --------- Signed-off-by: Tushar Goel --- vulnerabilities/importer.py | 2 +- vulnerabilities/models.py | 144 ++++- .../templates/vulnerability_details.html | 582 ++++++++---------- .../vulnerability_package_details.html | 88 +++ .../test_data/package_sort/sorted_purls.txt | 16 +- vulnerabilities/tests/test_view.py | 8 +- vulnerabilities/utils.py | 10 + vulnerabilities/views.py | 216 ++++--- vulnerablecode/urls.py | 6 + 9 files changed, 623 insertions(+), 449 deletions(-) create mode 100644 vulnerabilities/templates/vulnerability_package_details.html diff --git a/vulnerabilities/importer.py b/vulnerabilities/importer.py index c5a5c5743..2a296c680 100644 --- a/vulnerabilities/importer.py +++ b/vulnerabilities/importer.py @@ -111,7 +111,7 @@ def to_dict(self): def from_dict(cls, ref: dict): return cls( reference_id=ref["reference_id"], - reference_type=ref["reference_type"], + reference_type=ref.get("reference_type") or "", url=ref["url"], severities=[ VulnerabilitySeverity.from_dict(severity) for severity in ref["severities"] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 4db674e3e..9b6df7c13 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -7,14 +7,23 @@ # See https://aboutcode.org for more information about nexB OSS projects. 
# +import csv import hashlib import json import logging +import xml.etree.ElementTree as ET from contextlib import suppress from functools import cached_property +from itertools import groupby +from operator import attrgetter from typing import Union +from cvss.exceptions import CVSS2MalformedError +from cvss.exceptions import CVSS3MalformedError +from cvss.exceptions import CVSS4MalformedError from cwe2.database import Database +from cwe2.mappings import xml_database_path +from cwe2.weakness import Weakness as DBWeakness from django.contrib.auth import get_user_model from django.contrib.auth.models import UserManager from django.core import exceptions @@ -41,8 +50,8 @@ from univers.version_range import AlpineLinuxVersionRange from univers.versions import Version -from aboutcode import hashid from vulnerabilities import utils +from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS from vulnerabilities.utils import normalize_purl from vulnerabilities.utils import purl_to_dict @@ -371,6 +380,127 @@ def get_related_purls(self): """ return [p.package_url for p in self.packages.distinct().all()] + def aggregate_fixed_and_affected_packages(self): + from vulnerabilities.utils import get_purl_version_class + + sorted_fixed_by_packages = self.fixed_by_packages.filter(is_ghost=False).order_by( + "type", "namespace", "name", "qualifiers", "subpath" + ) + + if sorted_fixed_by_packages: + sorted_fixed_by_packages.first().calculate_version_rank + + sorted_affected_packages = self.affected_packages.all() + + if sorted_affected_packages: + sorted_affected_packages.first().calculate_version_rank + + grouped_fixed_by_packages = { + key: list(group) + for key, group in groupby( + sorted_fixed_by_packages, + key=attrgetter("type", "namespace", "name", "qualifiers", "subpath"), + ) + } + + all_affected_fixed_by_matches = [] + + for sorted_affected_package in sorted_affected_packages: + affected_fixed_by_matches = { + 
"affected_package": sorted_affected_package, + "matched_fixed_by_packages": [], + } + + # Build the key to find matching group + key = ( + sorted_affected_package.type, + sorted_affected_package.namespace, + sorted_affected_package.name, + sorted_affected_package.qualifiers, + sorted_affected_package.subpath, + ) + + # Get matching group from pre-grouped fixed_by_packages + matching_fixed_packages = grouped_fixed_by_packages.get(key, []) + + # Get version classes for comparison + affected_version_class = get_purl_version_class(sorted_affected_package) + affected_version = affected_version_class(sorted_affected_package.version) + + # Compare versions and filter valid matches + matched_fixed_by_packages = [ + fixed_by_package.purl + for fixed_by_package in matching_fixed_packages + if get_purl_version_class(fixed_by_package)(fixed_by_package.version) + > affected_version + ] + + affected_fixed_by_matches["matched_fixed_by_packages"] = matched_fixed_by_packages + all_affected_fixed_by_matches.append(affected_fixed_by_matches) + return sorted_fixed_by_packages, sorted_affected_packages, all_affected_fixed_by_matches + + def get_severity_vectors_and_values(self): + """ + Collect severity vectors and values, excluding EPSS scoring systems and handling errors gracefully. 
+ """ + severity_vectors = [] + severity_values = set() + + # Exclude EPSS scoring system + base_severities = self.severities.exclude(scoring_system=EPSS.identifier) + + # QuerySet for severities with valid scoring_elements and scoring_system in SCORING_SYSTEMS + valid_scoring_severities = base_severities.filter( + scoring_elements__isnull=False, scoring_system__in=SCORING_SYSTEMS.keys() + ) + + for severity in valid_scoring_severities: + try: + vector_values = SCORING_SYSTEMS[severity.scoring_system].get( + severity.scoring_elements + ) + if vector_values: + severity_vectors.append(vector_values) + except ( + CVSS2MalformedError, + CVSS3MalformedError, + CVSS4MalformedError, + NotImplementedError, + ) as e: + logging.error(f"CVSSMalformedError for {severity.scoring_elements}: {e}") + + valid_value_severities = base_severities.filter(value__isnull=False).exclude(value="") + + severity_values.update(valid_value_severities.values_list("value", flat=True)) + + return severity_vectors, severity_values + + +def get_cwes(self): + """Yield CWE Weakness objects""" + for cwe_category in self.cwe_files: + cwe_category.seek(0) + reader = csv.DictReader(cwe_category) + for row in reader: + yield DBWeakness(*list(row.values())[0:-1]) + tree = ET.parse(xml_database_path) + root = tree.getroot() + for tag_num in [1, 2]: # Categories , Views + tag = root[tag_num] + for child in tag: + yield DBWeakness( + *[ + child.attrib["ID"], + child.attrib.get("Name"), + None, + child.attrib.get("Status"), + child[0].text, + ] + ) + + +Database.get_cwes = get_cwes + class Weakness(models.Model): """ @@ -379,7 +509,15 @@ class Weakness(models.Model): cwe_id = models.IntegerField(help_text="CWE id") vulnerabilities = models.ManyToManyField(Vulnerability, related_name="weaknesses") - db = Database() + + cwe_by_id = {} + + def get_cwe(self, cwe_id): + if not self.cwe_by_id: + db = Database() + for weakness in db.get_cwes(): + self.cwe_by_id[str(weakness.cwe_id)] = weakness + return 
self.cwe_by_id[cwe_id] @property def cwe(self): @@ -391,7 +529,7 @@ def weakness(self): Return a queryset of Weakness for this vulnerability. """ try: - weakness = self.db.get(self.cwe_id) + weakness = self.get_cwe(str(self.cwe_id)) return weakness except Exception as e: logger.warning(f"Could not find CWE {self.cwe_id}: {e}") diff --git a/vulnerabilities/templates/vulnerability_details.html b/vulnerabilities/templates/vulnerability_details.html index e9e58c79e..7001c8f3b 100644 --- a/vulnerabilities/templates/vulnerability_details.html +++ b/vulnerabilities/templates/vulnerability_details.html @@ -33,10 +33,10 @@ Essentials -
  • +
  • - Affected/Fixed by packages ({{ affected_packages|length }}/{{ fixed_by_packages|length }}) + Severities ({{ severities|length }})
  • @@ -48,12 +48,12 @@
  • - - - Severities vectors ({{ severity_vectors|length }}) - - -
  • + + + Severity details ({{ severity_vectors|length }}) + + + {% if vulnerability.exploits %}
  • @@ -66,11 +66,11 @@ {% endif %}
  • - - - EPSS - - + + + EPSS + +
  • @@ -152,156 +152,72 @@ {{ vulnerability.risk_score }} - - - - - -
    - Severity ({{ severities|length }}) -
    -
    - - - - - - - {% for severity in severities %} - - - - - - {% empty %} - - - - {% endfor %} -
    System Score Found at
    {{ severity.scoring_system }}{{ severity.value }} - {{ severity.url }} -
    - There are no known severity scores. -
    -
    - -
    - Affected/Fixed by packages ({{ affected_packages|length }}/{{ fixed_by_packages|length }}) -
    -
    - - - - + + - - - {% for package in affected_packages|slice:":3" %} + +
    AffectedFixed byAffected and Fixed Packages + + Package Details + +
    +
    + Weaknesses ({{ weaknesses|length }}) +
    +
    + + {% for weakness in weaknesses %} - - + + {% empty %} - {% endfor %} - {% if affected_packages|length > 3 %} - - - - {% endif %} - -
    - {{ package.purl }} - - {% for match in all_affected_fixed_by_matches %} - {% if match.affected_package == package %} - {% if match.matched_fixed_by_packages|length > 0 %} - {% for pkg in match.matched_fixed_by_packages %} - {{ pkg }} -
    - {% endfor %} - {% else %} - There are no reported fixed by versions. - {% endif %} - {% endif %} - {% endfor %} +
    CWE-{{ weakness.cwe_id }} + + {{ weakness.name }} +
    - This vulnerability is not known to affect any packages. + + There are no known CWE.
    - See Affected/Fixed by packages tab for more -
    -
    - -
    - Weaknesses ({{ weaknesses|length }}) -
    -
    - - {% for weakness in weaknesses %} - - - - - - {% empty %} - - - - {% endfor %} -
    CWE-{{ weakness.cwe_id }} - - {{ weakness.name }} - -
    - There are no known CWE. -
    + +
    -
    - - - - - - - - - {% for package in affected_packages %} - - - - - {% empty %} - - - - {% endfor %} - + +
    +
    AffectedFixed by
    - {{ package.purl }} - - - {% for match in all_affected_fixed_by_matches %} - {% if match.affected_package == package %} - {% if match.matched_fixed_by_packages|length > 0 %} - {% for pkg in match.matched_fixed_by_packages %} - {{ pkg }} -
    - {% endfor %} - {% else %} - There are no reported fixed by versions. - {% endif %} - {% endif %} - {% endfor %} - -
    - This vulnerability is not known to affect any packages. -
    + + + + + + {% for severity in severities %} + + + + + + {% empty %} + + + + {% endfor %}
    System Score Found at
    {{ severity.scoring_system }}{{ severity.value }} + {{ severity.url }} +
    + There are no known severity scores. +
    @@ -341,103 +257,6 @@ -
    - {% for severity_vector in severity_vectors %} - {% if severity_vector.version == '2.0' %} - Vector: {{ severity_vector.vectorString }} - - - - - - - - - - - - - - - - - - - -
    Exploitability (E)Access Vector (AV)Access Complexity (AC)Authentication (Au)Confidentiality Impact (C)Integrity Impact (I)Availability Impact (A)
    {{ severity_vector.exploitability|cvss_printer:"high,functional,unproven,proof_of_concept,not_defined" }}{{ severity_vector.accessVector|cvss_printer:"local,adjacent_network,network" }}{{ severity_vector.accessComplexity|cvss_printer:"high,medium,low" }}{{ severity_vector.authentication|cvss_printer:"multiple,single,none" }}{{ severity_vector.confidentialityImpact|cvss_printer:"none,partial,complete" }}{{ severity_vector.integrityImpact|cvss_printer:"none,partial,complete" }}{{ severity_vector.availabilityImpact|cvss_printer:"none,partial,complete" }}
    - {% elif severity_vector.version == '3.1' or severity_vector.version == '3.0'%} - Vector: {{ severity_vector.vectorString }} - - - - - - - - - - - - - - - - - - - - - -
    Attack Vector (AV)Attack Complexity (AC)Privileges Required (PR)User Interaction (UI)Scope (S)Confidentiality Impact (C)Integrity Impact (I)Availability Impact (A)
    {{ severity_vector.attackVector|cvss_printer:"network,adjacent_network,local,physical"}}{{ severity_vector.attackComplexity|cvss_printer:"low,high" }}{{ severity_vector.privilegesRequired|cvss_printer:"none,low,high" }}{{ severity_vector.userInteraction|cvss_printer:"none,required"}}{{ severity_vector.scope|cvss_printer:"unchanged,changed" }}{{ severity_vector.confidentialityImpact|cvss_printer:"high,low,none" }}{{ severity_vector.integrityImpact|cvss_printer:"high,low,none" }}{{ severity_vector.availabilityImpact|cvss_printer:"high,low,none" }}
    - {% elif severity_vector.version == '4' %} - Vector: {{ severity_vector.vectorString }} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Attack Vector (AV)Attack Complexity (AC)Attack Requirements (AT)Privileges Required (PR)User Interaction (UI)Vulnerable System Impact Confidentiality (VC)Vulnerable System Impact Integrity (VI)Vulnerable System Impact Availability (VA)Subsequent System Impact Confidentiality (SC)Subsequent System Impact Integrity (SI)Subsequent System Impact Availability (SA)
    {{ severity_vector.attackVector|cvss_printer:"network,adjacent,local,physical"}}{{ severity_vector.attackComplexity|cvss_printer:"low,high" }}{{ severity_vector.attackRequirement|cvss_printer:"none,present" }}{{ severity_vector.privilegesRequired|cvss_printer:"none,low,high" }}{{ severity_vector.userInteraction|cvss_printer:"none,passive,active"}}{{ severity_vector.vulnerableSystemImpactConfidentiality|cvss_printer:"high,low,none" }}{{ severity_vector.vulnerableSystemImpactIntegrity|cvss_printer:"high,low,none" }}{{ severity_vector.vulnerableSystemImpactAvailability|cvss_printer:"high,low,none" }}{{ severity_vector.subsequentSystemImpactConfidentiality|cvss_printer:"high,low,none" }}{{ severity_vector.subsequentSystemImpactIntegrity|cvss_printer:"high,low,none" }}{{ severity_vector.subsequentSystemImpactAvailability|cvss_printer:"high,low,none" }}
    - {% elif severity_vector.version == 'ssvc' %} -
    - Vector: {{ severity_vector.vectorString }} -
    - {% endif %} - {% empty %} - - - There are no known vectors. - - - {% endfor %} -
    - -
    {% for exploit in vulnerability.exploits.all %} @@ -586,108 +405,192 @@ {% endfor %} + +
    + {% for severity_vector in severity_vectors %} + {% if severity_vector.vector.version == '2.0' %} + Vector: {{ severity_vector.vector.vectorString }} Found at {{ severity_vector.origin }} +
    + + + + + + + + + + + + + + + + + + +
    Exploitability (E)Access Vector (AV)Access Complexity (AC)Authentication (Au)Confidentiality Impact (C)Integrity Impact (I)Availability Impact (A)
    {{ severity_vector.vector.exploitability|cvss_printer:"high,functional,unproven,proof_of_concept,not_defined" }}{{ severity_vector.vector.accessVector|cvss_printer:"local,adjacent_network,network" }}{{ severity_vector.vector.accessComplexity|cvss_printer:"high,medium,low" }}{{ severity_vector.vector.authentication|cvss_printer:"multiple,single,none" }}{{ severity_vector.vector.confidentialityImpact|cvss_printer:"none,partial,complete" }}{{ severity_vector.vector.integrityImpact|cvss_printer:"none,partial,complete" }}{{ severity_vector.vector.availabilityImpact|cvss_printer:"none,partial,complete" }}
    + {% elif severity_vector.vector.version == '3.1' or severity_vector.vector.version == '3.0'%} + Vector: {{ severity_vector.vector.vectorString }} Found at {{ severity_vector.origin }} + + + + + + + + + + + + + + + + + + + + + +
    Attack Vector (AV)Attack Complexity (AC)Privileges Required (PR)User Interaction (UI)Scope (S)Confidentiality Impact (C)Integrity Impact (I)Availability Impact (A)
    {{ severity_vector.vector.attackVector|cvss_printer:"network,adjacent_network,local,physical"}}{{ severity_vector.vector.attackComplexity|cvss_printer:"low,high" }}{{ severity_vector.vector.privilegesRequired|cvss_printer:"none,low,high" }}{{ severity_vector.vector.userInteraction|cvss_printer:"none,required"}}{{ severity_vector.vector.scope|cvss_printer:"unchanged,changed" }}{{ severity_vector.vector.confidentialityImpact|cvss_printer:"high,low,none" }}{{ severity_vector.vector.integrityImpact|cvss_printer:"high,low,none" }}{{ severity_vector.vector.availabilityImpact|cvss_printer:"high,low,none" }}
    + {% elif severity_vector.vector.version == '4' %} + Vector: {{ severity_vector.vector.vectorString }} Found at {{ severity_vector.origin }} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Attack Vector (AV)Attack Complexity (AC)Attack Requirements (AT)Privileges Required (PR)User Interaction (UI)Vulnerable System Impact Confidentiality (VC)Vulnerable System Impact Integrity (VI)Vulnerable System Impact Availability (VA)Subsequent System Impact Confidentiality (SC)Subsequent System Impact Integrity (SI)Subsequent System Impact Availability (SA)
    {{ severity_vector.vector.attackVector|cvss_printer:"network,adjacent,local,physical"}}{{ severity_vector.vector.attackComplexity|cvss_printer:"low,high" }}{{ severity_vector.vector.attackRequirement|cvss_printer:"none,present" }}{{ severity_vector.vector.privilegesRequired|cvss_printer:"none,low,high" }}{{ severity_vector.vector.userInteraction|cvss_printer:"none,passive,active"}}{{ severity_vector.vector.vulnerableSystemImpactConfidentiality|cvss_printer:"high,low,none" }}{{ severity_vector.vector.vulnerableSystemImpactIntegrity|cvss_printer:"high,low,none" }}{{ severity_vector.vector.vulnerableSystemImpactAvailability|cvss_printer:"high,low,none" }}{{ severity_vector.vector.subsequentSystemImpactConfidentiality|cvss_printer:"high,low,none" }}{{ severity_vector.vector.subsequentSystemImpactIntegrity|cvss_printer:"high,low,none" }}{{ severity_vector.vector.subsequentSystemImpactAvailability|cvss_printer:"high,low,none" }}
    + {% elif severity_vector.vector.version == 'ssvc' %} +
    + Vector: {{ severity_vector.vector.vectorString }} Found at {{ severity_vector.origin }} +
    + {% endif %} + {% empty %} + + + There are no known vectors. + + + {% endfor %} +
    - {% for severity in severities %} - {% if severity.scoring_system == 'epss' %} -
    +
    + {% if epss_data %}
    - Exploit Prediction Scoring System + Exploit Prediction Scoring System (EPSS)
    - - - - - - - - - - - - {% if severity.published_at %} - - - - + + + + + + + + + + {% if epss_data.published_at %} + + + + {% endif %} - - +
    - - Percentile - - {{ severity.scoring_elements }}
    - - EPSS score - - {{ severity.value }}
    - - Published at - - {{ severity.published_at }}
    + + Percentile + + {{ epss_data.percentile }}
    + + EPSS Score + + {{ epss_data.score }}
    + + Published At + + {{ epss_data.published_at }}
    -
    + {% else %} +

    No EPSS data available for this vulnerability.

    {% endif %} - {% empty %} -
    - - - There are no EPSS available. - - -
    - {% endfor %} +
    -
    - - - - - - - - - - - {% for log in history %} +
    +
    - - Date - - Actor - Action Source - VulnerableCode Version -
    + - - - - - + + + + + - {% empty %} - - - - {% endfor %} -
    {{ log.get_iso_time }}{{ log.actor_name }}{{ log.get_action_type_label }} {{log.source_url }} {{ log.software_version }} + + Date + + Actor + Action Source + VulnerableCode Version +
    - There are no relevant records. -
    -
    + + {% for log in history %} + + {{ log.get_iso_time }} + {{ log.actor_name }} + {{ log.get_action_type_label }} + {{log.source_url }} + {{ log.software_version }} + + {% empty %} + + + There are no relevant records. + + + {% endfor %} + + @@ -711,5 +614,4 @@ } - -{% endblock %} +{% endblock %} \ No newline at end of file diff --git a/vulnerabilities/templates/vulnerability_package_details.html b/vulnerabilities/templates/vulnerability_package_details.html new file mode 100644 index 000000000..21fb52192 --- /dev/null +++ b/vulnerabilities/templates/vulnerability_package_details.html @@ -0,0 +1,88 @@ +{% extends "base.html" %} +{% load humanize %} +{% load widget_tweaks %} +{% load static %} +{% load show_cvss %} +{% load url_filters %} + +{% block title %} +VulnerableCode Vulnerability Package Details - {{ vulnerability.vulnerability_id }} +{% endblock %} + +{% block content %} + +{% if vulnerability %} +
    +
    +
    +
    + Vulnerable and Fixing Package details for Vulnerability: + + {{ vulnerability.vulnerability_id }} + +
    +
    +
    + + + + + + + + + {% for package in affected_packages %} + + + + + {% empty %} + + + + {% endfor %} + +
    AffectedFixed by
    + {{ package.purl }} + + + {% for match in all_affected_fixed_by_matches %} + {% if match.affected_package == package %} + {% if match.matched_fixed_by_packages|length > 0 %} + {% for pkg in match.matched_fixed_by_packages %} + {{ pkg }} +
    + {% endfor %} + {% else %} + There are no reported fixed by versions. + {% endif %} + {% endif %} + {% endfor %} + +
    + This vulnerability is not known to affect any packages. +
    +
    +
    +
    +{% endif %} + + + + + +{% endblock %} \ No newline at end of file diff --git a/vulnerabilities/tests/test_data/package_sort/sorted_purls.txt b/vulnerabilities/tests/test_data/package_sort/sorted_purls.txt index 886119bfd..7faf4c22a 100644 --- a/vulnerabilities/tests/test_data/package_sort/sorted_purls.txt +++ b/vulnerabilities/tests/test_data/package_sort/sorted_purls.txt @@ -21,10 +21,10 @@ pkg:conan/capnproto@0.15.2 pkg:deb/debian/jackson-databind@2.8.6-1%2Bdeb9u7?distro=stretch pkg:deb/debian/jackson-databind@2.8.6-1%2Bdeb9u10?distro=stretch pkg:deb/debian/jackson-databind@2.9.8-3%2Bdeb10u4?distro=sid -pkg:deb/debian/jackson-databind@2.12.1-1%2Bdeb11u1 pkg:deb/debian/jackson-databind@2.12.1-1%2Bdeb11u1?distro=sid -pkg:deb/debian/jackson-databind@2.13.2.2-1?distro=sid +pkg:deb/debian/jackson-databind@2.12.1-1%2Bdeb11u1 pkg:deb/debian/jackson-databind@2.13.2.2-1?distro=stretch +pkg:deb/debian/jackson-databind@2.13.2.2-1?distro=sid pkg:deb/debian/jackson-databind@2.14.0-1?distro=sid pkg:deb/ubuntu/dpkg@1.13.11ubuntu7~proposed pkg:deb/ubuntu/dpkg@1.13.11ubuntu7.2 @@ -94,10 +94,10 @@ pkg:pypi/jinja2@2.1.1 pkg:pypi/jinja2@2.2 pkg:pypi/jinja2@2.2.1 pkg:pypi/jinja2@2.10 -pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=11 -pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=12 -pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=13 -pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=2 -pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=5 -pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=7 pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=9 +pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=7 +pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=5 +pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=2 +pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=13 +pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=12 +pkg:rpm/redhat/openssl@1.0.1e-30.el6_6?arch=11 diff --git a/vulnerabilities/tests/test_view.py b/vulnerabilities/tests/test_view.py index 98a555294..3b32ee31c 100644 --- a/vulnerabilities/tests/test_view.py +++ 
b/vulnerabilities/tests/test_view.py @@ -23,10 +23,9 @@ from vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilitySeverity from vulnerabilities.templatetags.url_filters import url_quote_filter +from vulnerabilities.utils import get_purl_version_class from vulnerabilities.views import PackageDetails from vulnerabilities.views import PackageSearch -from vulnerabilities.views import get_purl_version_class -from vulnerabilities.views import purl_sort_key BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DIR = os.path.join(BASE_DIR, "test_data/package_sort") @@ -202,12 +201,13 @@ def setUp(self): for pkg in input_purls: real_purl = PackageURL.from_string(pkg) attrs = {k: v for k, v in real_purl.to_dict().items() if v} - Package.objects.create(**attrs) + pkg = Package.objects.create(**attrs) + pkg.calculate_version_rank def test_sorted_queryset(self): qs_all = Package.objects.all() pkgs_qs_all = list(qs_all) - sorted_pkgs_qs_all = sorted(pkgs_qs_all, key=purl_sort_key) + sorted_pkgs_qs_all = pkgs_qs_all pkg_package_urls = [obj.package_url for obj in sorted_pkgs_qs_all] sorted_purls = os.path.join(TEST_DIR, "sorted_purls.txt") diff --git a/vulnerabilities/utils.py b/vulnerabilities/utils.py index 969a08f2f..d9a3c7e04 100644 --- a/vulnerabilities/utils.py +++ b/vulnerabilities/utils.py @@ -32,6 +32,7 @@ from packageurl import PackageURL from packageurl.contrib.django.utils import without_empty_values from univers.version_range import RANGE_CLASS_BY_SCHEMES +from univers.version_range import AlpineLinuxVersionRange from univers.version_range import NginxVersionRange from univers.version_range import VersionRange @@ -536,3 +537,12 @@ def normalize_purl(purl: Union[PackageURL, str]): if isinstance(purl, PackageURL): purl = str(purl) return PackageURL.from_string(purl) + + +def get_purl_version_class(purl): + RANGE_CLASS_BY_SCHEMES["apk"] = AlpineLinuxVersionRange + purl_version_class = None + check_version_class = 
RANGE_CLASS_BY_SCHEMES.get(purl.type, None) + if check_version_class: + purl_version_class = check_version_class.version_class + return purl_version_class diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 77f75238d..a2df48634 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -14,6 +14,7 @@ from django.contrib import messages from django.core.exceptions import ValidationError from django.core.mail import send_mail +from django.db.models import Prefetch from django.http.response import Http404 from django.shortcuts import redirect from django.shortcuts import render @@ -29,39 +30,14 @@ from vulnerabilities.forms import ApiUserCreationForm from vulnerabilities.forms import PackageSearchForm from vulnerabilities.forms import VulnerabilitySearchForm -from vulnerabilities.models import VulnerabilityStatusType from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS -from vulnerabilities.utils import get_severity_range from vulnerablecode import __version__ as VULNERABLECODE_VERSION from vulnerablecode.settings import env PAGE_SIZE = 20 -def purl_sort_key(purl: models.Package): - """ - Return a sort key for the built-in sorted() function when sorting a list - of Package objects. If the Package ``type`` is supported by univers, apply - the univers version class to the Package ``version``, and otherwise use the - ``version`` attribute as is. 
- """ - purl_version_class = get_purl_version_class(purl) - purl_sort_version = purl.version - if purl_version_class: - purl_sort_version = purl_version_class(purl.version) - return (purl.type, purl.namespace, purl.name, purl_sort_version, purl.qualifiers, purl.subpath) - - -def get_purl_version_class(purl: models.Package): - RANGE_CLASS_BY_SCHEMES["apk"] = AlpineLinuxVersionRange - purl_version_class = None - check_version_class = RANGE_CLASS_BY_SCHEMES.get(purl.type, None) - if check_version_class: - purl_version_class = check_version_class.version_class - return purl_version_class - - class PackageSearch(ListView): model = models.Package template_name = "packages.html" @@ -159,90 +135,93 @@ def get_queryset(self): return ( super() .get_queryset() + .select_related() .prefetch_related( - "references", - "aliases", - "weaknesses", - "severities", - "exploits", + Prefetch( + "references", + queryset=models.VulnerabilityReference.objects.only( + "reference_id", "reference_type", "url" + ), + ), + Prefetch( + "aliases", + queryset=models.Alias.objects.only("alias"), + ), + Prefetch( + "weaknesses", + queryset=models.Weakness.objects.only("cwe_id"), + ), + Prefetch( + "severities", + queryset=models.VulnerabilitySeverity.objects.only( + "scoring_system", "value", "url", "scoring_elements", "published_at" + ), + ), + Prefetch( + "exploits", + queryset=models.Exploit.objects.only( + "data_source", "description", "required_action", "due_date", "notes" + ), + ), ) ) def get_context_data(self, **kwargs): + """ + Build context with preloaded QuerySets and minimize redundant queries. 
+ """ context = super().get_context_data(**kwargs) - weaknesses = self.object.weaknesses.all() + vulnerability = self.object + + # Pre-fetch and process data in Python instead of the template weaknesses_present_in_db = [ - weakness_object for weakness_object in weaknesses if weakness_object.weakness + weakness_object + for weakness_object in vulnerability.weaknesses.all() + if weakness_object.weakness ] - status = self.object.get_status_label + + valid_severities = self.object.severities.exclude(scoring_system=EPSS.identifier).filter( + scoring_elements__isnull=False, scoring_system__in=SCORING_SYSTEMS.keys() + ) severity_vectors = [] - severity_values = set() - for s in self.object.severities.all(): - if s.scoring_system == EPSS.identifier: - continue - - if s.scoring_elements and s.scoring_system in SCORING_SYSTEMS: - try: - vector_values = SCORING_SYSTEMS[s.scoring_system].get(s.scoring_elements) - severity_vectors.append(vector_values) - except ( - CVSS2MalformedError, - CVSS3MalformedError, - CVSS4MalformedError, - NotImplementedError, - ): - logging.error(f"CVSSMalformedError for {s.scoring_elements}") - - if s.value: - severity_values.add(s.value) - - sorted_affected_packages = sorted(self.object.affected_packages.all(), key=purl_sort_key) - sorted_fixed_by_packages = sorted(self.object.fixed_by_packages.all(), key=purl_sort_key) - - all_affected_fixed_by_matches = [] - for sorted_affected_package in sorted_affected_packages: - affected_fixed_by_matches = {} - affected_fixed_by_matches["affected_package"] = sorted_affected_package - matched_fixed_by_packages = [] - for fixed_by_package in sorted_fixed_by_packages: - - # Ghost Package can't fix vulnerability. 
- if fixed_by_package.is_ghost: - continue - - sorted_affected_version_class = get_purl_version_class(sorted_affected_package) - fixed_by_version_class = get_purl_version_class(fixed_by_package) - if ( - (fixed_by_package.type == sorted_affected_package.type) - and (fixed_by_package.namespace == sorted_affected_package.namespace) - and (fixed_by_package.name == sorted_affected_package.name) - and (fixed_by_package.qualifiers == sorted_affected_package.qualifiers) - and (fixed_by_package.subpath == sorted_affected_package.subpath) - and ( - fixed_by_version_class(fixed_by_package.version) - > sorted_affected_version_class(sorted_affected_package.version) - ) - ): - matched_fixed_by_packages.append(fixed_by_package.purl) - affected_fixed_by_matches["matched_fixed_by_packages"] = matched_fixed_by_packages - all_affected_fixed_by_matches.append(affected_fixed_by_matches) + + for severity in valid_severities: + try: + vector_values = SCORING_SYSTEMS[severity.scoring_system].get( + severity.scoring_elements + ) + if vector_values: + severity_vectors.append({"vector": vector_values, "origin": severity.url}) + except ( + CVSS2MalformedError, + CVSS3MalformedError, + CVSS4MalformedError, + NotImplementedError, + ): + logging.error(f"CVSSMalformedError for {severity.scoring_elements}") + + epss_severity = vulnerability.severities.filter(scoring_system="epss").first() + epss_data = None + if epss_severity: + epss_data = { + "percentile": epss_severity.scoring_elements, + "score": epss_severity.value, + "published_at": epss_severity.published_at, + } context.update( { - "vulnerability": self.object, + "vulnerability": vulnerability, "vulnerability_search_form": VulnerabilitySearchForm(self.request.GET), - "severities": list(self.object.severities.all()), - "severity_score_range": get_severity_range(severity_values), + "severities": list(vulnerability.severities.all()), "severity_vectors": severity_vectors, - "references": self.object.references.all(), - "aliases": 
self.object.aliases.all(), - "affected_packages": sorted_affected_packages, - "fixed_by_packages": sorted_fixed_by_packages, + "references": list(vulnerability.references.all()), + "aliases": list(vulnerability.aliases.all()), "weaknesses": weaknesses_present_in_db, - "status": status, - "history": self.object.history, - "all_affected_fixed_by_matches": all_affected_fixed_by_matches, + "status": vulnerability.get_status_label, + "history": vulnerability.history, + "epss_data": epss_data, } ) return context @@ -316,3 +295,54 @@ def form_valid(self, form): def get_success_url(self): return reverse_lazy("api_user_request") + + +class VulnerabilityPackagesDetails(DetailView): + """ + View to display all packages affected by or fixing a specific vulnerability. + URL: /vulnerabilities/{vulnerability_id}/packages + """ + + model = models.Vulnerability + template_name = "vulnerability_package_details.html" + slug_url_kwarg = "vulnerability_id" + slug_field = "vulnerability_id" + + def get_queryset(self): + """ + Prefetch and optimize related data to minimize database hits. + """ + return ( + super() + .get_queryset() + .prefetch_related( + Prefetch( + "affecting_packages", + queryset=models.Package.objects.only("type", "namespace", "name", "version"), + ), + Prefetch( + "fixed_by_packages", + queryset=models.Package.objects.only("type", "namespace", "name", "version"), + ), + ) + ) + + def get_context_data(self, **kwargs): + """ + Build context with preloaded QuerySets and minimize redundant queries. 
+ """ + context = super().get_context_data(**kwargs) + vulnerability = self.object + ( + sorted_fixed_by_packages, + sorted_affected_packages, + all_affected_fixed_by_matches, + ) = vulnerability.aggregate_fixed_and_affected_packages() + context.update( + { + "affected_packages": sorted_affected_packages, + "fixed_by_packages": sorted_fixed_by_packages, + "all_affected_fixed_by_matches": all_affected_fixed_by_matches, + } + ) + return context diff --git a/vulnerablecode/urls.py b/vulnerablecode/urls.py index 54540a66d..c6dd3da44 100644 --- a/vulnerablecode/urls.py +++ b/vulnerablecode/urls.py @@ -28,6 +28,7 @@ from vulnerabilities.views import PackageDetails from vulnerabilities.views import PackageSearch from vulnerabilities.views import VulnerabilityDetails +from vulnerabilities.views import VulnerabilityPackagesDetails from vulnerabilities.views import VulnerabilitySearch from vulnerablecode.settings import DEBUG_TOOLBAR @@ -83,6 +84,11 @@ def __init__(self, *args, **kwargs): VulnerabilityDetails.as_view(), name="vulnerability_details", ), + path( + "vulnerabilities//packages", + VulnerabilityPackagesDetails.as_view(), + name="vulnerability_package_details", + ), path( "api/", include(api_router.urls), From cbda0ca26ba323293ac78ac89c22500bd5f392b3 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Thu, 13 Mar 2025 13:51:13 +0530 Subject: [PATCH 063/545] Fast content ID migration (#1795) Reference: https://github.com/aboutcode-org/vulnerablecode/issues/1796 Recompute unique content ID for advisories and dedupe the advisories * Add new content ID function Signed-off-by: Tushar Goel * Add tests and address review comments Signed-off-by: Tushar Goel * New content ID pipeline Signed-off-by: Tushar Goel * New content ID pipeline Signed-off-by: Tushar Goel * Address review comments Signed-off-by: Tushar Goel * Address review comments Signed-off-by: Tushar Goel * Address review comments Signed-off-by: Tushar Goel * Address review comments 
Signed-off-by: Tushar Goel * Address review comments Signed-off-by: Tushar Goel * Address review comments Signed-off-by: Tushar Goel * Address review comments Signed-off-by: Tushar Goel * Address review comments Signed-off-by: Tushar Goel * Remove unique content ID from unqiue together Signed-off-by: Tushar Goel * Remove unique together from advisories Signed-off-by: Tushar Goel * Fix migrations Signed-off-by: Tushar Goel * Fix pipeline errors Signed-off-by: Tushar Goel * Add filter for fast itreation Signed-off-by: Tushar Goel * Increase batch size Signed-off-by: Tushar Goel * Fix error Signed-off-by: Tushar Goel * Add logs Signed-off-by: Tushar Goel * Defer db indexing for content id Signed-off-by: Keshav Priyadarshi * Ensure the reference id is always a string Signed-off-by: Keshav Priyadarshi * Alternate content id migration pipeline Signed-off-by: Keshav Priyadarshi * Keep the oldest advisory while deduping Signed-off-by: Keshav Priyadarshi * Use iterator() instead of paginated() for fetching advisories - paginated() performs poorly when iterating over large records compared to the built-in iterator() Signed-off-by: Keshav Priyadarshi * Move pipeline test to test/pipelines/ Signed-off-by: Keshav Priyadarshi * Update test for the new dedupe pipeline Signed-off-by: Keshav Priyadarshi --------- Signed-off-by: Tushar Goel Signed-off-by: Keshav Priyadarshi Co-authored-by: Tushar Goel Co-authored-by: Tushar Goel <34160672+TG1999@users.noreply.github.com> --- vulnerabilities/importer.py | 65 ++- vulnerabilities/improvers/__init__.py | 2 + .../0089_alter_advisory_unique_content_id.py | 22 + vulnerabilities/models.py | 16 +- .../pipelines/remove_duplicate_advisories.py | 110 +++++ vulnerabilities/severity_systems.py | 3 + .../test_remove_duplicate_advisories.py | 117 ++++++ vulnerabilities/tests/test_add_cvsssv31.py | 2 + .../tests/test_compute_content_id.py | 228 +++++++++++ ...security_advisories-importer-expected.json | 78 ++-- 
...security_advisories-importer-expected.json | 380 +++++++++--------- vulnerabilities/utils.py | 56 +++ 12 files changed, 821 insertions(+), 258 deletions(-) create mode 100644 vulnerabilities/migrations/0089_alter_advisory_unique_content_id.py create mode 100644 vulnerabilities/pipelines/remove_duplicate_advisories.py create mode 100644 vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py create mode 100644 vulnerabilities/tests/test_compute_content_id.py diff --git a/vulnerabilities/importer.py b/vulnerabilities/importer.py index 2a296c680..b22d29d00 100644 --- a/vulnerabilities/importer.py +++ b/vulnerabilities/importer.py @@ -9,6 +9,7 @@ import dataclasses import datetime +import functools import logging import os import shutil @@ -46,7 +47,8 @@ logger = logging.getLogger(__name__) -@dataclasses.dataclass(order=True) +@dataclasses.dataclass(eq=True) +@functools.total_ordering class VulnerabilitySeverity: # FIXME: this should be named scoring_system, like in the model system: ScoringSystem @@ -55,15 +57,26 @@ class VulnerabilitySeverity: published_at: Optional[datetime.datetime] = None def to_dict(self): - published_at_dict = ( - {"published_at": self.published_at.isoformat()} if self.published_at else {} - ) - return { + data = { "system": self.system.identifier, "value": self.value, "scoring_elements": self.scoring_elements, - **published_at_dict, } + if self.published_at: + if isinstance(self.published_at, datetime.datetime): + data["published_at"] = self.published_at.isoformat() + else: + data["published_at"] = self.published_at + return data + + def __lt__(self, other): + if not isinstance(other, VulnerabilitySeverity): + return NotImplemented + return self._cmp_key() < other._cmp_key() + + # TODO: Add cache + def _cmp_key(self): + return (self.system.identifier, self.value, self.scoring_elements, self.published_at) @classmethod def from_dict(cls, severity: dict): @@ -79,7 +92,8 @@ def from_dict(cls, severity: dict): ) 
-@dataclasses.dataclass(order=True) +@dataclasses.dataclass(eq=True) +@functools.total_ordering class Reference: reference_id: str = "" reference_type: str = "" @@ -90,27 +104,28 @@ def __post_init__(self): if not self.url: raise TypeError("Reference must have a url") - def normalized(self): - severities = sorted(self.severities) - return Reference( - reference_id=self.reference_id, - url=self.url, - severities=severities, - reference_type=self.reference_type, - ) + def __lt__(self, other): + if not isinstance(other, Reference): + return NotImplemented + return self._cmp_key() < other._cmp_key() + + # TODO: Add cache + def _cmp_key(self): + return (self.reference_id, self.reference_type, self.url, tuple(self.severities)) def to_dict(self): + """Return a normalized dictionary representation""" return { "reference_id": self.reference_id, "reference_type": self.reference_type, "url": self.url, - "severities": [severity.to_dict() for severity in self.severities], + "severities": [severity.to_dict() for severity in sorted(self.severities)], } @classmethod def from_dict(cls, ref: dict): return cls( - reference_id=ref["reference_id"], + reference_id=str(ref["reference_id"]), reference_type=ref.get("reference_type") or "", url=ref["url"], severities=[ @@ -140,7 +155,8 @@ class NoAffectedPackages(Exception): """ -@dataclasses.dataclass(order=True, frozen=True) +@functools.total_ordering +@dataclasses.dataclass(eq=True) class AffectedPackage: """ Relate a Package URL with a range of affected versions and a fixed version. 
@@ -170,6 +186,19 @@ def get_fixed_purl(self): raise ValueError(f"Affected Package {self.package!r} does not have a fixed version") return update_purl_version(purl=self.package, version=str(self.fixed_version)) + def __lt__(self, other): + if not isinstance(other, AffectedPackage): + return NotImplemented + return self._cmp_key() < other._cmp_key() + + # TODO: Add cache + def _cmp_key(self): + return ( + str(self.package), + str(self.affected_version_range or ""), + str(self.fixed_version or ""), + ) + @classmethod def merge( cls, affected_packages: Iterable diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index 9b11c7920..37143d125 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -18,6 +18,7 @@ from vulnerabilities.pipelines import enhance_with_kev from vulnerabilities.pipelines import enhance_with_metasploit from vulnerabilities.pipelines import flag_ghost_packages +from vulnerabilities.pipelines import remove_duplicate_advisories IMPROVERS_REGISTRY = [ valid_versions.GitHubBasicImprover, @@ -45,6 +46,7 @@ compute_package_version_rank.ComputeVersionRankPipeline, collect_commits.CollectFixCommitsPipeline, add_cvss31_to_CVEs.CVEAdvisoryMappingPipeline, + remove_duplicate_advisories.RemoveDuplicateAdvisoriesPipeline, ] IMPROVERS_REGISTRY = { diff --git a/vulnerabilities/migrations/0089_alter_advisory_unique_content_id.py b/vulnerabilities/migrations/0089_alter_advisory_unique_content_id.py new file mode 100644 index 000000000..1cea2eb7e --- /dev/null +++ b/vulnerabilities/migrations/0089_alter_advisory_unique_content_id.py @@ -0,0 +1,22 @@ +# Generated by Django 4.2.17 on 2025-02-27 07:47 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0088_fix_alpine_purl_type"), + ] + + operations = [ + migrations.AlterField( + model_name="advisory", + name="unique_content_id", + field=models.CharField( + 
blank=True, + help_text="A 64 character unique identifier for the content of the advisory since we use sha256 as hex", + max_length=64, + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 9b6df7c13..7bfc1ba11 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -53,6 +53,7 @@ from vulnerabilities import utils from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS +from vulnerabilities.utils import compute_content_id from vulnerabilities.utils import normalize_purl from vulnerabilities.utils import purl_to_dict from vulnerablecode import __version__ as VULNERABLECODE_VERSION @@ -1315,8 +1316,9 @@ class Advisory(models.Model): """ unique_content_id = models.CharField( - max_length=32, + max_length=64, blank=True, + help_text="A 64 character unique identifier for the content of the advisory since we use sha256 as hex", ) aliases = models.JSONField(blank=True, default=list, help_text="A list of alias strings") summary = models.TextField( @@ -1357,16 +1359,8 @@ class Meta: ordering = ["aliases", "date_published", "unique_content_id"] def save(self, *args, **kwargs): - checksum = hashlib.md5() - for field in ( - self.summary, - self.affected_packages, - self.references, - self.weaknesses, - ): - value = json.dumps(field, separators=(",", ":")).encode("utf-8") - checksum.update(value) - self.unique_content_id = checksum.hexdigest() + advisory_data = self.to_advisory_data() + self.unique_content_id = compute_content_id(advisory_data, include_metadata=False) super().save(*args, **kwargs) def to_advisory_data(self) -> "AdvisoryData": diff --git a/vulnerabilities/pipelines/remove_duplicate_advisories.py b/vulnerabilities/pipelines/remove_duplicate_advisories.py new file mode 100644 index 000000000..bb4d749b2 --- /dev/null +++ b/vulnerabilities/pipelines/remove_duplicate_advisories.py @@ -0,0 +1,110 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. 
+# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +from aboutcode.pipeline import LoopProgress + +from vulnerabilities.models import Advisory +from vulnerabilities.pipelines import VulnerableCodePipeline +from vulnerabilities.utils import compute_content_id + + +class RemoveDuplicateAdvisoriesPipeline(VulnerableCodePipeline): + """Pipeline to compute new advisory content id and remove duplicate advisories based on their content.""" + + pipeline_id = "remove_duplicate_advisories" + + @classmethod + def steps(cls): + return (cls.remove_duplicates,) + + def remove_duplicates(self): + """ + Recompute the content ID and remove duplicate advisories, keeping the oldest one. + """ + + advisories_count = Advisory.objects.all().count() + self.log(f"Computing new content id for {advisories_count} and removing duplicates.") + + update_batch_size = 500 + delete_batch_size = 5000 + chunk_size = 5000 + deleted_advisories_count = 0 + updated_advisories_count = 0 + duplicate_advisory_ids = [] + advisories_to_update = [] + content_ids = set() + + advisories = Advisory.objects.all().order_by("id").iterator(chunk_size=chunk_size) + + progress = LoopProgress( + total_iterations=advisories_count, + logger=self.log, + progress_step=1, + ) + + for advisory in progress.iter(advisories): + content_id = compute_content_id(advisory.to_advisory_data()) + + if content_id in content_ids: + duplicate_advisory_ids.append(advisory.id) + else: + content_ids.add(content_id) + if advisory.unique_content_id != content_id: + advisory.unique_content_id = content_id + advisories_to_update.append(advisory) + + if len(duplicate_advisory_ids) > delete_batch_size: + deleted_advisories_count += delete_advisories( + 
advisory_ids=duplicate_advisory_ids,
+                    logger=self.log,
+                )
+                duplicate_advisory_ids.clear()
+
+            if len(advisories_to_update) > update_batch_size:
+                updated_advisories_count += bulk_update_advisories(
+                    advisories=advisories_to_update,
+                    fields=["unique_content_id"],
+                    logger=self.log,
+                )
+                advisories_to_update.clear()
+
+        deleted_advisories_count += delete_advisories(
+            advisory_ids=duplicate_advisory_ids,
+            logger=self.log,
+        )
+        updated_advisories_count += bulk_update_advisories(
+            advisories=advisories_to_update,
+            fields=["unique_content_id"],
+            logger=self.log,
+        )
+
+        self.log(f"Removed {deleted_advisories_count} duplicate advisories.")
+        self.log(f"Updated content id for {updated_advisories_count} advisories.")
+
+
+def bulk_update_advisories(advisories, fields, logger):
+    item_count = 0
+    if advisories:
+        try:
+            Advisory.objects.bulk_update(objs=advisories, fields=fields)
+            item_count += len(advisories)
+        except Exception as e:
+            logger(f"Error updating Advisory: {e}")
+    return item_count
+
+
+def delete_advisories(advisory_ids, logger):
+    item_count = 0
+    if advisory_ids:
+        try:
+            Advisory.objects.filter(id__in=advisory_ids).delete()
+            item_count += len(advisory_ids)
+        except Exception as e:
+            logger(f"Error deleting Advisory: {e}")
+    return item_count
diff --git a/vulnerabilities/severity_systems.py b/vulnerabilities/severity_systems.py
index 946cb6479..17008a219 100644
--- a/vulnerabilities/severity_systems.py
+++ b/vulnerabilities/severity_systems.py
@@ -42,6 +42,9 @@ def compute(self, scoring_elements: str) -> str:
     def get(self, scoring_elements: str):
         return NotImplementedError
 
+    def __str__(self):
+        return f"{self.identifier}"
+
 
 @dataclasses.dataclass(order=True)
 class Cvssv2ScoringSystem(ScoringSystem):
diff --git a/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py b/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py
new file mode 100644
index 000000000..89187d488
--- /dev/null
+++ 
b/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py @@ -0,0 +1,117 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +import datetime +from unittest.mock import patch + +import pytz +from django.test import TestCase +from packageurl import PackageURL + +from vulnerabilities.importer import AdvisoryData +from vulnerabilities.importer import AffectedPackage +from vulnerabilities.importer import Reference +from vulnerabilities.models import Advisory +from vulnerabilities.pipelines.remove_duplicate_advisories import RemoveDuplicateAdvisoriesPipeline + + +class TestRemoveDuplicateAdvisoriesPipeline(TestCase): + def setUp(self): + self.advisory_data = AdvisoryData( + summary="Test summary", + affected_packages=[ + AffectedPackage( + package=PackageURL(type="npm", name="package1"), + affected_version_range="vers:npm/>=1.0.0|<2.0.0", + ) + ], + references=[Reference(url="https://example.com/vuln1")], + ) + + def test_remove_duplicates_keeps_oldest(self): + """ + Test that when multiple advisories have the same content, + only the oldest one is kept. 
+ """ + # Create three advisories with same content but different dates + dates = [ + datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + datetime.datetime(2024, 1, 2, tzinfo=pytz.UTC), + datetime.datetime(2024, 1, 3, tzinfo=pytz.UTC), + ] + + advisories = [] + for date in dates: + advisory = Advisory.objects.create( + summary=self.advisory_data.summary, + affected_packages=[pkg.to_dict() for pkg in self.advisory_data.affected_packages], + references=[ref.to_dict() for ref in self.advisory_data.references], + date_imported=date, + date_collected=date, + ) + advisories.append(advisory) + print(advisory.id) + + # Run the pipeline + pipeline = RemoveDuplicateAdvisoriesPipeline() + pipeline.execute() + + # Check that only the first advisory remains + remaining = Advisory.objects.all() + self.assertEqual(remaining.count(), 1) + self.assertEqual(remaining.first().date_imported, dates[0]) + + def test_different_content_preserved(self): + """ + Test that advisories with different content are preserved. + """ + # Create two advisories with different content + advisory1 = Advisory.objects.create( + summary="Summary 1", + affected_packages=[], + date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + references=[], + date_imported=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + ) + + advisory2 = Advisory.objects.create( + summary="Summary 2", + affected_packages=[], + references=[], + date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + date_imported=datetime.datetime(2024, 1, 2, tzinfo=pytz.UTC), + ) + + # Run the pipeline + pipeline = RemoveDuplicateAdvisoriesPipeline() + pipeline.execute() + + # Check that both advisories remain + self.assertEqual(Advisory.objects.count(), 2) + + def test_recompute_content_ids(self): + """ + Test that advisories without content IDs get them updated. 
+ """ + # Create advisory without content ID + advisory = Advisory.objects.create( + summary=self.advisory_data.summary, + affected_packages=[pkg.to_dict() for pkg in self.advisory_data.affected_packages], + references=[ref.to_dict() for ref in self.advisory_data.references], + unique_content_id="", + date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + ) + + # Run the pipeline + pipeline = RemoveDuplicateAdvisoriesPipeline() + pipeline.execute() + + # Check that content ID was updated + advisory.refresh_from_db() + self.assertNotEqual(advisory.unique_content_id, "") diff --git a/vulnerabilities/tests/test_add_cvsssv31.py b/vulnerabilities/tests/test_add_cvsssv31.py index c79b51879..e20d1158a 100644 --- a/vulnerabilities/tests/test_add_cvsssv31.py +++ b/vulnerabilities/tests/test_add_cvsssv31.py @@ -29,6 +29,8 @@ def setUp(self): } ], "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-1234", + "reference_id": "CVE-2024-1234", + "reference_type": "cve", } ], date_collected="2024-09-27T19:38:00Z", diff --git a/vulnerabilities/tests/test_compute_content_id.py b/vulnerabilities/tests/test_compute_content_id.py new file mode 100644 index 000000000..87fe9e9f0 --- /dev/null +++ b/vulnerabilities/tests/test_compute_content_id.py @@ -0,0 +1,228 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
+# + +import datetime +from unittest import TestCase + +import pytz +from packageurl import PackageURL +from univers.version_range import VersionRange + +from vulnerabilities.importer import AdvisoryData +from vulnerabilities.importer import AffectedPackage +from vulnerabilities.importer import Reference +from vulnerabilities.importer import VulnerabilitySeverity +from vulnerabilities.severity_systems import SCORING_SYSTEMS +from vulnerabilities.utils import compute_content_id + + +class TestComputeContentId(TestCase): + def setUp(self): + self.maxDiff = None + self.base_advisory = AdvisoryData( + summary="Test summary", + affected_packages=[ + AffectedPackage( + package=PackageURL( + type="npm", + name="package1", + qualifiers={}, + ), + affected_version_range=VersionRange.from_string("vers:npm/>=1.0.0|<2.0.0"), + ) + ], + references=[ + Reference( + url="https://example.com/vuln1", + reference_id="GHSA-1234-5678-9012", + severities=[ + VulnerabilitySeverity( + system=SCORING_SYSTEMS["cvssv3.1"], + value="7.5", + ) + ], + ) + ], + date_published=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + ) + + def test_same_content_different_order_same_id(self): + """ + Test that advisories with same content but different ordering have same content ID + """ + advisory1 = self.base_advisory + + # Same content but different order of references and affected packages + advisory2 = AdvisoryData( + summary="Test summary", + affected_packages=list(reversed(self.base_advisory.affected_packages)), + references=list(reversed(self.base_advisory.references)), + date_published=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + ) + + assert compute_content_id(advisory1) == compute_content_id(advisory2) + + def test_different_metadata_same_content_same_id(self): + """ + Test that advisories with same content but different metadata have same content ID + when include_metadata=False + """ + advisory1 = self.base_advisory + + advisory2 = AdvisoryData( + summary=self.base_advisory.summary, + 
affected_packages=self.base_advisory.affected_packages, + references=self.base_advisory.references, + date_published=self.base_advisory.date_published, + url=self.base_advisory.url, + ) + + assert compute_content_id(advisory1) == compute_content_id(advisory2) + + def test_different_metadata_different_id_when_included(self): + """ + Test that advisories with same content but different metadata have different content IDs + when include_metadata=True + """ + advisory1 = self.base_advisory + + advisory2 = AdvisoryData( + summary="Test summary", + affected_packages=self.base_advisory.affected_packages, + references=self.base_advisory.references, + date_published=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + url="https://different.url", + ) + + self.assertNotEqual( + compute_content_id(advisory1, include_metadata=True), + compute_content_id(advisory2, include_metadata=True), + ) + + def test_different_summary_different_id(self): + """ + Test that advisories with different summaries have different content IDs + """ + advisory1 = self.base_advisory + + advisory2 = AdvisoryData( + summary="Different summary", + affected_packages=self.base_advisory.affected_packages, + references=self.base_advisory.references, + date_published=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + ) + + self.assertNotEqual( + compute_content_id(advisory1), + compute_content_id(advisory2), + ) + + def test_different_affected_packages_different_id(self): + """ + Test that advisories with different affected packages have different content IDs + """ + advisory1 = self.base_advisory + + advisory2 = AdvisoryData( + summary="Test summary", + affected_packages=[ + AffectedPackage( + package=PackageURL( + type="npm", + name="different-package", + qualifiers={}, + ), + affected_version_range=VersionRange.from_string("vers:npm/>=1.0.0|<2.0.0"), + ) + ], + references=self.base_advisory.references, + date_published=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + ) + + self.assertNotEqual( + 
compute_content_id(advisory1), + compute_content_id(advisory2), + ) + + def test_different_references_different_id(self): + """ + Test that advisories with different references have different content IDs + """ + advisory1 = self.base_advisory + + advisory2 = AdvisoryData( + summary="Test summary", + affected_packages=self.base_advisory.affected_packages, + references=[ + Reference( + url="https://example.com/different-vuln", + reference_id="GHSA-9999-9999-9999", + severities=[ + VulnerabilitySeverity( + system=SCORING_SYSTEMS["cvssv3.1"], + value="8.5", + ) + ], + ) + ], + date_published=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + ) + + self.assertNotEqual( + compute_content_id(advisory1), + compute_content_id(advisory2), + ) + + def test_different_weaknesses_different_id(self): + """ + Test that advisories with different weaknesses have different content IDs + """ + advisory1 = AdvisoryData( + summary="Test summary", + affected_packages=self.base_advisory.affected_packages, + references=self.base_advisory.references, + weaknesses=[1, 2, 3], + date_published=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + ) + + advisory2 = AdvisoryData( + summary="Test summary", + affected_packages=self.base_advisory.affected_packages, + references=self.base_advisory.references, + weaknesses=[4, 5, 6], + date_published=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + ) + + self.assertNotEqual( + compute_content_id(advisory1), + compute_content_id(advisory2), + ) + + def test_empty_fields_same_id(self): + """ + Test that advisories with empty optional fields still generate same content ID + """ + advisory1 = AdvisoryData( + summary="", + affected_packages=self.base_advisory.affected_packages, + references=self.base_advisory.references, + date_published=None, + ) + + advisory2 = AdvisoryData( + summary="", + affected_packages=self.base_advisory.affected_packages, + references=self.base_advisory.references, + date_published=None, + ) + + self.assertEqual( + 
compute_content_id(advisory1), + compute_content_id(advisory2), + ) diff --git a/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json b/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json index 938e77249..4a2b97556 100644 --- a/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json +++ b/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json @@ -1,6 +1,6 @@ [ { - "unique_content_id": "e06ef4fb12b1b0817736222cc219c5be", + "unique_content_id": "8f54462a45ac49635f660b6fb755d5e05cdbc34ebaa565e38ca20c522579ce7f", "aliases": [ "CORE-2010-0121" ], @@ -36,7 +36,7 @@ "weaknesses": [] }, { - "unique_content_id": "dab2e1aa4777dbcd579905643982aab1", + "unique_content_id": "fcb0ba0ce66c1f1cf3b4213fd6e9108ab9965d633582d3e9c070a792e02d9876", "aliases": [ "CVE-2009-3896" ], @@ -115,7 +115,7 @@ "weaknesses": [] }, { - "unique_content_id": "91c6638b38a1e6e2ff4997eeefef8cf8", + "unique_content_id": "e9adfcf58bd2f302fd81436744937e8ea8bae7e1d7133d54cc4097bb94e68656", "aliases": [ "CVE-2009-3898" ], @@ -158,7 +158,7 @@ "weaknesses": [] }, { - "unique_content_id": "31675b37fe392d1e36b77f7198b1d008", + "unique_content_id": "1000911200f3a7046464251c86a45451e6d049b88cb3e5edc6d009a1867418f7", "aliases": [ "CVE-2009-4487" ], @@ -189,7 +189,7 @@ "weaknesses": [] }, { - "unique_content_id": "ef00adb6af6c2a00e81c8ec8de71eed6", + "unique_content_id": "92ce767b8cea36271d33c119cb6f706f64f5aba7335cca6791eca90a87f48de1", "aliases": [ "CVE-2010-2263" ], @@ -232,7 +232,7 @@ "weaknesses": [] }, { - "unique_content_id": "eb41c9a738129f7f76c5ff813d190621", + "unique_content_id": "9a3699853c72ab1e08f226c4f09f669b6e8b6f0431fa4e78549cd87d8466e0f7", "aliases": [ "CVE-2010-2266" ], @@ -275,7 +275,7 @@ "weaknesses": [] }, { - "unique_content_id": "d403898b9315a9ec88d9a401af5352fb", + "unique_content_id": "79d90dc8b83d6267a92f31d11be14dc27e619f6edaa996935bf4d0d33b70e575", "aliases": [ "CVE-2011-4315" ], 
@@ -318,7 +318,7 @@ "weaknesses": [] }, { - "unique_content_id": "96c2ffdeacca4901942abd83d54f33f5", + "unique_content_id": "044f1ec3ed59bdbafada7e40b37f7a3cbd0afc31c67aac002251f7ed56e756db", "aliases": [ "CVE-2011-4963" ], @@ -373,7 +373,7 @@ "weaknesses": [] }, { - "unique_content_id": "ca72fb146fcd014ee284ef66f7fc1c08", + "unique_content_id": "9bb829ca8d94430d97ea8bb4d67cddb9f41140a7550e5dced08918f35f1dc5f1", "aliases": [ "CVE-2012-1180" ], @@ -434,7 +434,7 @@ "weaknesses": [] }, { - "unique_content_id": "901e1dc04473ff40c6e503baec5e9bf6", + "unique_content_id": "9d373a60d30d98c6a84d134e0f1c1880b4e82b795a9175c51b172c9d988633c4", "aliases": [ "CVE-2012-2089" ], @@ -495,7 +495,7 @@ "weaknesses": [] }, { - "unique_content_id": "e74396e2dc204fb095c802fe54d4d176", + "unique_content_id": "6dfd4b51bcdf1ee31bfdd97ee6370422b70533c1db972de69cdc2e281a4bb90a", "aliases": [ "CVE-2013-2028" ], @@ -556,7 +556,7 @@ "weaknesses": [] }, { - "unique_content_id": "13592aaee15657bff9afca8c98edf8bf", + "unique_content_id": "4590b8b17cfdf0314dffd75372ba416fd8ced35cdeb673aabe9d2ed5b19dab3d", "aliases": [ "CVE-2013-2070" ], @@ -647,7 +647,7 @@ "weaknesses": [] }, { - "unique_content_id": "0f21f4e3d88f4af06f0c46d096e90320", + "unique_content_id": "b011769b7166e6e3a5b0dabd560be9fec2b4963a0c14c8934b394504041dd801", "aliases": [ "CVE-2013-4547" ], @@ -714,7 +714,7 @@ "weaknesses": [] }, { - "unique_content_id": "3430956de63de2b1188c3d1e50c3b0cd", + "unique_content_id": "f9a0149f8d0c6afe588cc7c0a170e45c828219c342b9d7ca12d0e830c68b752a", "aliases": [ "CVE-2014-0088" ], @@ -763,7 +763,7 @@ "weaknesses": [] }, { - "unique_content_id": "db01da77157a7a773285dc98169416ec", + "unique_content_id": "04ec1beb69b3712ef90b5975ff13d5d9ece8dc4c31e2fbd033e1e7be98f889ed", "aliases": [ "CVE-2014-0133" ], @@ -824,7 +824,7 @@ "weaknesses": [] }, { - "unique_content_id": "83d5fba07f12acd2e4947e68d233fbe5", + "unique_content_id": "e3af8c6275036d10bb0d3b20807288808bcb24ff1fad37f09757d381f90fc862", "aliases": [ 
"CVE-2014-3556" ], @@ -891,7 +891,7 @@ "weaknesses": [] }, { - "unique_content_id": "ce87032bced3f187b1c0fbacc52b8c16", + "unique_content_id": "68957cdbe4f38386944b07c2f3138ad59f02df490dab487d8709f8642a395496", "aliases": [ "CVE-2014-3616" ], @@ -946,7 +946,7 @@ "weaknesses": [] }, { - "unique_content_id": "71c918b8f82b4de8cfa23fc96fa0d7a7", + "unique_content_id": "cc6ff6eaba227bf65c93964fdf2731b75ff1597638283ae950e3941cd4932632", "aliases": [ "CVE-2016-0742" ], @@ -1001,7 +1001,7 @@ "weaknesses": [] }, { - "unique_content_id": "2ec9de991e2cb7a5a0ba79bed8556a41", + "unique_content_id": "74d2403b1a2d875ba8411a315d217fd704642a39c3e9392bd2b81cd4e4cca8a8", "aliases": [ "CVE-2016-0746" ], @@ -1056,7 +1056,7 @@ "weaknesses": [] }, { - "unique_content_id": "925abc90d30273fe8cb404b7f3c8dfd3", + "unique_content_id": "3f9a96e88c2c8cb3ad5852621091d686b420e0fa25921a9f10f330e02e7f47d6", "aliases": [ "CVE-2016-0747" ], @@ -1111,7 +1111,7 @@ "weaknesses": [] }, { - "unique_content_id": "04f5bc12ff49a95a29c459222379abe4", + "unique_content_id": "3db919e67e7061f392f575e7ac88884850c686c133ebdd4f58dfddb6196e15bf", "aliases": [ "CVE-2016-4450" ], @@ -1190,7 +1190,7 @@ "weaknesses": [] }, { - "unique_content_id": "b3192a372fdac00b2cdf462b562cf73b", + "unique_content_id": "60c648561ee11d1ece306182ff608e5d66aeb748c91c4c91d79aa4f7967f2149", "aliases": [ "CVE-2017-7529" ], @@ -1257,7 +1257,7 @@ "weaknesses": [] }, { - "unique_content_id": "cb70875e6e02b2d41dd8876b4729bf84", + "unique_content_id": "e4731a12d4f385fc4d0774714c3e79dc98b8ec9c1c648120e0aa196a0d165066", "aliases": [ "CVE-2018-16843" ], @@ -1312,7 +1312,7 @@ "weaknesses": [] }, { - "unique_content_id": "cf47abf58659080601c4cd87a119a769", + "unique_content_id": "37a3e3a4d916420d151462c0e761db15f3dfb81ead3e3fa18e84ef4a93151d4c", "aliases": [ "CVE-2018-16844" ], @@ -1367,7 +1367,7 @@ "weaknesses": [] }, { - "unique_content_id": "33d08a513ea5fef861e924f2601f7ac6", + "unique_content_id": 
"ef80f06b34224fbde70a6a359ccf297c0ec2bfae9148973d3689a1c2acb888ad", "aliases": [ "CVE-2018-16845" ], @@ -1434,7 +1434,7 @@ "weaknesses": [] }, { - "unique_content_id": "8ca47577347bd9f2027e09e32bc74866", + "unique_content_id": "7dd1dec4f019ce4e044852324feb9444dbc965f26c98025bc28f50294251c5c0", "aliases": [ "CVE-2019-9511" ], @@ -1489,7 +1489,7 @@ "weaknesses": [] }, { - "unique_content_id": "74ec3c647d544d6e6935492b7dceb572", + "unique_content_id": "f52c1d6763864aa721f3c5d6fa201712a04cea0851085e8129014e56ba7b4bbe", "aliases": [ "CVE-2019-9513" ], @@ -1544,7 +1544,7 @@ "weaknesses": [] }, { - "unique_content_id": "2537fa6a9e8e84a3c06bb122fcbf468d", + "unique_content_id": "fcb04608ea5442dbf70575273074915efc16a95be9d8c84d5f3146f6917b3fb1", "aliases": [ "CVE-2019-9516" ], @@ -1599,7 +1599,7 @@ "weaknesses": [] }, { - "unique_content_id": "27612bc7cab82114b1549552f5ad48ff", + "unique_content_id": "b141e948fdfecc52a52fd4111fff37b57216a7f8fd1421df478db15e620a4571", "aliases": [ "CVE-2021-23017" ], @@ -1666,7 +1666,7 @@ "weaknesses": [] }, { - "unique_content_id": "dad2ebc242641f6a276b00769ef57efa", + "unique_content_id": "516f2188bdac91f9372ec3e200c4e754179f61fb8bf3a4613d97ebb569e46831", "aliases": [ "CVE-2022-41741" ], @@ -1733,7 +1733,7 @@ "weaknesses": [] }, { - "unique_content_id": "e17dde538a78c978602298541bcd29f0", + "unique_content_id": "743193c823a19a8eea1eeb8bb5ea6c3314ca6350b8d6ba0bcf2ac29d2e99ab11", "aliases": [ "CVE-2022-41742" ], @@ -1800,7 +1800,7 @@ "weaknesses": [] }, { - "unique_content_id": "e4c6a0358264fb7523f6ee40f844854f", + "unique_content_id": "702a79bf8a92e5ce967d5d540f03d225e05906df0cb641c5538e0e8b8045aa89", "aliases": [ "CVE-2024-24989" ], @@ -1837,7 +1837,7 @@ "weaknesses": [] }, { - "unique_content_id": "f87492771be35866bf4dce017ea54dc8", + "unique_content_id": "71ee7b435e15272f8531b568d58f82e33cfb3881f3ee80b5cae1788183f91827", "aliases": [ "CVE-2024-24990" ], @@ -1874,7 +1874,7 @@ "weaknesses": [] }, { - "unique_content_id": 
"79d9b38e6e89e3f3fc5ca4b2e64d0faa", + "unique_content_id": "041e081a630681e36df17fc2471cd58a789dce20b54dce62c66900baceb7d771", "aliases": [ "CVE-2024-31079" ], @@ -1929,7 +1929,7 @@ "weaknesses": [] }, { - "unique_content_id": "b3d7627b206f561242cdd2eae0e3bbeb", + "unique_content_id": "95dab77a3ea69d6d0bac6b48719f4e1d5435af7f1f1a0c1d62aa343bed5e3f32", "aliases": [ "CVE-2024-32760" ], @@ -1984,7 +1984,7 @@ "weaknesses": [] }, { - "unique_content_id": "43c2f41bb851164d3495f3c204a57f20", + "unique_content_id": "b97accb1929bfc3181c61e41c2163f051cac435ea3671b05ebf708ac24c53f15", "aliases": [ "CVE-2024-34161" ], @@ -2039,7 +2039,7 @@ "weaknesses": [] }, { - "unique_content_id": "b72c609cd1be7c77f4432e1bc8c365f3", + "unique_content_id": "93ffd507f57f7b01de0bc7cff479daba1c120e28d45b60a14f8fa98bdf597f4a", "aliases": [ "CVE-2024-35200" ], @@ -2094,7 +2094,7 @@ "weaknesses": [] }, { - "unique_content_id": "686399b9012be40d39b5366ec1695768", + "unique_content_id": "fc72f81267258996f729b98893890074ad6155adcc3352d30a04765977836995", "aliases": [ "VU#120541", "CVE-2009-3555" @@ -2150,7 +2150,7 @@ "weaknesses": [] }, { - "unique_content_id": "c616b60f7fd802e88ca29fce6222654e", + "unique_content_id": "de7a819f87c93c708251b734406d2b9916fce494ab3987be40ca37426b0c2044", "aliases": [ "VU#180065", "CVE-2009-2629" diff --git a/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json b/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json index 12f141ef7..d844c0c9f 100644 --- a/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json +++ b/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json @@ -1,6 +1,6 @@ [ { - "unique_content_id": "88aac050ad73754e929805f2ab5e64e7", + "unique_content_id": "4ee23c143c0a01cd7035e1646adaf2222725ad2c96447ffc524eb79d1ac532dd", "aliases": [ "VC-OPENSSL-20141015" ], @@ -48,7 +48,7 @@ "weaknesses": [] }, { - "unique_content_id": 
"35448b5f7b3fba9f72b91c02f114fb54", + "unique_content_id": "db3632c3ff2c87ef3524c93e91dc8cbeca0778583bcb08c9a8807cbb282d31cb", "aliases": [ "CVE-2002-0655", "VC-OPENSSL-20020730-CVE-2002-0655" @@ -86,7 +86,7 @@ "weaknesses": [] }, { - "unique_content_id": "829a6d1f23353afa49ace62ba465a58f", + "unique_content_id": "f4f8760e71f028224b6bdbe5b477b90217df8ca6905036317584b92781c2a119", "aliases": [ "CVE-2002-0656", "VC-OPENSSL-20020730-CVE-2002-0656" @@ -124,7 +124,7 @@ "weaknesses": [] }, { - "unique_content_id": "167751346aa8fefc0a6e3b73ccb1f1a0", + "unique_content_id": "9bdebb1f707c4c32b8834d1c6d0b55faa70072728c35bc0215df164af8448367", "aliases": [ "CVE-2002-0657", "VC-OPENSSL-20020730-CVE-2002-0657" @@ -162,7 +162,7 @@ "weaknesses": [] }, { - "unique_content_id": "cd2aa8fefe14c523b0f404ea639582db", + "unique_content_id": "01616cd468b12076531c0a0453c8766381afac45b3bae651b2535336c25195c6", "aliases": [ "CVE-2002-0659", "VC-OPENSSL-20020730-CVE-2002-0659" @@ -200,7 +200,7 @@ "weaknesses": [] }, { - "unique_content_id": "8544420c83cf74faff35e8829adaa340", + "unique_content_id": "49964979bdbf578d45f122df679ba527fd8fbf64cc2d077728fb1c7f506f4c7f", "aliases": [ "CVE-2002-1568", "VC-OPENSSL-20020808-CVE-2002-1568" @@ -238,7 +238,7 @@ "weaknesses": [] }, { - "unique_content_id": "61d2edb3343321c505bed6e2c93025b1", + "unique_content_id": "9a471da876825cebb089f856300f156b2987e0ffe50686b1646bb2041e7e4c8b", "aliases": [ "CVE-2003-0078", "VC-OPENSSL-20030219-CVE-2003-0078" @@ -288,7 +288,7 @@ "weaknesses": [] }, { - "unique_content_id": "a0eeb293e46b8d3bbd5029ccaa8585bd", + "unique_content_id": "ea79326dc573c9da310a5d90e901d9c1c6844afbc7ba492ee6edcf3fc6ed9208", "aliases": [ "CVE-2003-0131", "VC-OPENSSL-20030319-CVE-2003-0131" @@ -338,7 +338,7 @@ "weaknesses": [] }, { - "unique_content_id": "4fbc2d1aad1223b8ab887ce8d4d07175", + "unique_content_id": "29882534d53b1efc839bf130322ad85c220fa6326b24268aeed6af66f2855d02", "aliases": [ "CVE-2003-0147", "VC-OPENSSL-20030314-CVE-2003-0147" @@ 
-388,7 +388,7 @@ "weaknesses": [] }, { - "unique_content_id": "525144b2cfc83c2afb4746cbb043f665", + "unique_content_id": "2ba1e73cd00bc41e969ea310ec78534f4c6d5124ca0b871dc4ce322a4b34e232", "aliases": [ "CVE-2003-0543", "VC-OPENSSL-20030930-CVE-2003-0543" @@ -438,7 +438,7 @@ "weaknesses": [] }, { - "unique_content_id": "b20ae6e077855796c5fa2ea663a88269", + "unique_content_id": "038ee7715473ae9e8184e755bbc864397d9e9c4bdc7b878782197d5f445085ac", "aliases": [ "CVE-2003-0544", "VC-OPENSSL-20030930-CVE-2003-0544" @@ -488,7 +488,7 @@ "weaknesses": [] }, { - "unique_content_id": "23009992dbac485c71608f4cf9811ef2", + "unique_content_id": "e510e167dfcfce7357fe0616e7ae6ff525c3c2325ea6e0011c06d1300f1d7c10", "aliases": [ "CVE-2003-0545", "VC-OPENSSL-20030930-CVE-2003-0545" @@ -526,7 +526,7 @@ "weaknesses": [] }, { - "unique_content_id": "47507506fbd9633ba7a6429dc0db28b5", + "unique_content_id": "fb504a9108cb16e440dc0db440f4bae47f2683838b518db42a371fc0453d6a88", "aliases": [ "CVE-2003-0851", "VC-OPENSSL-20031104-CVE-2003-0851" @@ -564,7 +564,7 @@ "weaknesses": [] }, { - "unique_content_id": "2c802d89f18645aa477b635d3a5242ad", + "unique_content_id": "a467aec230d90bf340b7325fe9207425c4d35680a470268682407639819c56f6", "aliases": [ "CVE-2004-0079", "VC-OPENSSL-20040317-CVE-2004-0079" @@ -614,7 +614,7 @@ "weaknesses": [] }, { - "unique_content_id": "6f23a0db775050dc33df47c7cc883b11", + "unique_content_id": "7a9fed2602761c2ae8073bce2e5e1dfa60cb84b83c4fe6e05906bbbaf5e46c7a", "aliases": [ "CVE-2004-0081", "VC-OPENSSL-20040317-CVE-2004-0081" @@ -652,7 +652,7 @@ "weaknesses": [] }, { - "unique_content_id": "cb0e8758b89ae43b1ed34bfb3c0b3b56", + "unique_content_id": "9d9976f31462bb2e67fbf400706c1d2b0299c697e42bf4d3b8dd8e57a37d8e6a", "aliases": [ "CVE-2004-0112", "VC-OPENSSL-20040317-CVE-2004-0112" @@ -690,7 +690,7 @@ "weaknesses": [] }, { - "unique_content_id": "de61ebaf88fec68edc50b1bbc3c82f15", + "unique_content_id": "5b55cf4a1e9c3add130bf345864834163a6924f0165a25458ddf710b31d56b70", 
"aliases": [ "CVE-2004-0975", "VC-OPENSSL-20040930-CVE-2004-0975" @@ -740,7 +740,7 @@ "weaknesses": [] }, { - "unique_content_id": "9cc871a9e62ad5ca419397816ae02f3f", + "unique_content_id": "cba43db55e749a2cd6a8e2b4a8859b0cfb99c57ebb384b08ff64687b69982e0c", "aliases": [ "CVE-2005-2969", "VC-OPENSSL-20051011-CVE-2005-2969" @@ -802,7 +802,7 @@ "weaknesses": [] }, { - "unique_content_id": "95ecb527c6494eb3dc0e22337c257b02", + "unique_content_id": "9257f845c847e35c7d1aa8587eac8fecc3e42ea36da4a73525adfc9c552d92d6", "aliases": [ "CVE-2006-2937", "VC-OPENSSL-20060928-CVE-2006-2937" @@ -852,7 +852,7 @@ "weaknesses": [] }, { - "unique_content_id": "1ed97c8f77a2948144952bbf2df0d15f", + "unique_content_id": "1012d0129bc2bf8d506f3a5abe83570b93979b82add79f0167a08320e397d181", "aliases": [ "CVE-2006-2940", "VC-OPENSSL-20060928-CVE-2006-2940" @@ -914,7 +914,7 @@ "weaknesses": [] }, { - "unique_content_id": "275102d3f86e163b329b3bd7e4032658", + "unique_content_id": "8280b343c51657b22636bc717abb349ca3c44f0c053bc1e4a5f0b36440229d47", "aliases": [ "CVE-2006-3738", "VC-OPENSSL-20060928-CVE-2006-3738" @@ -976,7 +976,7 @@ "weaknesses": [] }, { - "unique_content_id": "509415f8d684ef69f274426ff454ee18", + "unique_content_id": "d40f47b16b42d15836f11963090ae9bd8ee81396815649437c05a3763f5c0028", "aliases": [ "CVE-2006-4339", "VC-OPENSSL-20060905-CVE-2006-4339" @@ -1038,7 +1038,7 @@ "weaknesses": [] }, { - "unique_content_id": "65804b3824faa47750e76089a0851d29", + "unique_content_id": "6ce834bf29c1216739243c40e4e7e13563b6e7ee37195b59489542cdae28c644", "aliases": [ "CVE-2006-4343", "VC-OPENSSL-20060928-CVE-2006-4343" @@ -1100,7 +1100,7 @@ "weaknesses": [] }, { - "unique_content_id": "df251bb60bdec54891d4de225180f2ee", + "unique_content_id": "af7a8ad59af270f7ef97f3219807aacf3e5ef68c009a1a127593c7ed0371393d", "aliases": [ "CVE-2007-4995", "VC-OPENSSL-20071012-CVE-2007-4995" @@ -1138,7 +1138,7 @@ "weaknesses": [] }, { - "unique_content_id": "987af90a510832e0adfe428cf642f8b3", + 
"unique_content_id": "6e1fe5317b3377fba03774a136517301651a683c7bd40e56718a77b14718f8ba", "aliases": [ "CVE-2007-5135", "VC-OPENSSL-20071012-CVE-2007-5135" @@ -1176,7 +1176,7 @@ "weaknesses": [] }, { - "unique_content_id": "2583bf8ccba8c985bab919b69ccc00e5", + "unique_content_id": "31901d67d2f1a8a6e0558d82580f7223d7f5d8986fa025f202bbc2f8bfbcf282", "aliases": [ "CVE-2008-0891", "VC-OPENSSL-20080528-CVE-2008-0891" @@ -1214,7 +1214,7 @@ "weaknesses": [] }, { - "unique_content_id": "707840f8f10854ba4abf1409b159f35d", + "unique_content_id": "0a025dba94a703c96c56234016505ec5bb2424a29bb0881b837d2a7e0fc0c9a4", "aliases": [ "CVE-2008-1672", "VC-OPENSSL-20080528-CVE-2008-1672" @@ -1252,7 +1252,7 @@ "weaknesses": [] }, { - "unique_content_id": "a52c691f587165864b42caa4be445576", + "unique_content_id": "7537c1d90dd6c6ff6c065a4a9b2ebd8f7060d69f1c2f4e8d1029c6cd17dbac0c", "aliases": [ "CVE-2008-5077", "VC-OPENSSL-20090107-CVE-2008-5077" @@ -1290,7 +1290,7 @@ "weaknesses": [] }, { - "unique_content_id": "6ec3760bac617981cc8cd2369115f10e", + "unique_content_id": "ec18943f7b002b1a3999bfb8b71078f6c0cc14fadd2a226accc81b7e3c07b57d", "aliases": [ "CVE-2009-0590", "VC-OPENSSL-20090325-CVE-2009-0590" @@ -1328,7 +1328,7 @@ "weaknesses": [] }, { - "unique_content_id": "2b44645ffc6197aaeb99296cc87b3258", + "unique_content_id": "2ca10b0c5e2883828105f49783b0369798b610871a821fd020a9cd541a82539e", "aliases": [ "CVE-2009-0591", "VC-OPENSSL-20090325-CVE-2009-0591" @@ -1366,7 +1366,7 @@ "weaknesses": [] }, { - "unique_content_id": "4fffdc4369dd44a30fae0836347f91de", + "unique_content_id": "f414a498973b8e2d69129426ea6a5e3201efd1b8c5f9f6a4f8f3cba543701cb3", "aliases": [ "CVE-2009-0789", "VC-OPENSSL-20090325-CVE-2009-0789" @@ -1404,7 +1404,7 @@ "weaknesses": [] }, { - "unique_content_id": "e250eb725e8ae34ba3933779594935f6", + "unique_content_id": "12e1eced51b649340678cf2d6e9b206e411c2fcd76c9a2d2f4c358b4ce480589", "aliases": [ "CVE-2009-1377", "VC-OPENSSL-20090512-CVE-2009-1377" @@ -1448,7 +1448,7 @@ 
"weaknesses": [] }, { - "unique_content_id": "868b6df2d8ffc22c9f9d83fd7da54401", + "unique_content_id": "bac66dcd2f0ad0469f600dbec41e0ec28219aab575fd5319a4f6d71675deda30", "aliases": [ "CVE-2009-1378", "VC-OPENSSL-20090512-CVE-2009-1378" @@ -1492,7 +1492,7 @@ "weaknesses": [] }, { - "unique_content_id": "9233bcc1b091ea2d0fe8d8a2820191f5", + "unique_content_id": "bd12a0b86dcdd5a9a410597243f1700603dd5cd3ca6f0c40ab08aaeafd7d4edf", "aliases": [ "CVE-2009-1379", "VC-OPENSSL-20090512-CVE-2009-1379" @@ -1536,7 +1536,7 @@ "weaknesses": [] }, { - "unique_content_id": "0097aaf34c70d34f665917931de0a380", + "unique_content_id": "b28a70e21f739116e19415a8ce53ecc95060ceacba347960a8292cc70a46762b", "aliases": [ "CVE-2009-1386", "VC-OPENSSL-20090602-CVE-2009-1386" @@ -1574,7 +1574,7 @@ "weaknesses": [] }, { - "unique_content_id": "e872aef605740cacbb7547101151f4c7", + "unique_content_id": "42f716c07ad6ec9ae3eaece55884154a042ca5fe1ebc7abc0b6bd1e56aabe942", "aliases": [ "CVE-2009-1387", "VC-OPENSSL-20090205-CVE-2009-1387" @@ -1612,7 +1612,7 @@ "weaknesses": [] }, { - "unique_content_id": "e4f35efada1573e600eeb3f197a9654e", + "unique_content_id": "850ee33c668bfb81f14d0412e4339312cfc05088304246c02b4ec3cf8274f1b1", "aliases": [ "CVE-2009-3245", "VC-OPENSSL-20100223-CVE-2009-3245" @@ -1650,7 +1650,7 @@ "weaknesses": [] }, { - "unique_content_id": "3ede4a6de30467e840dadb6b1a2f94fc", + "unique_content_id": "e4c27c5b08884c79d2350038aa3ea44e57ac58d20ea4dcf682658288b7ec4268", "aliases": [ "CVE-2009-3555", "VC-OPENSSL-20091105-CVE-2009-3555" @@ -1688,7 +1688,7 @@ "weaknesses": [] }, { - "unique_content_id": "91d6f4b44c2f61e0b1d98cbec9e4633d", + "unique_content_id": "61e80d10d33dde52fc3c7bc32f19fe3763bffef204240f578b490986e1ce7aff", "aliases": [ "CVE-2009-4355", "VC-OPENSSL-20100113-CVE-2009-4355" @@ -1726,7 +1726,7 @@ "weaknesses": [] }, { - "unique_content_id": "f07be07de5fe8173dc2934d11c36c94d", + "unique_content_id": "08e65d1f3043871ffe0f802544cb08ac0822cf486e7cb9aebb04b301c46b986c", 
"aliases": [ "CVE-2010-0433", "VC-OPENSSL-20100119-CVE-2010-0433" @@ -1764,7 +1764,7 @@ "weaknesses": [] }, { - "unique_content_id": "94276d565fb0e1af8800da5df17f96be", + "unique_content_id": "bd7aef7bfdb58b2311644f5ef6b9fba6252b4ee4823061cce018f34f38e61ac6", "aliases": [ "CVE-2010-0740", "VC-OPENSSL-20100324-CVE-2010-0740" @@ -1802,7 +1802,7 @@ "weaknesses": [] }, { - "unique_content_id": "fdfe8fe89fb08b0cedb50a64445793f9", + "unique_content_id": "f7669cb060a5572fa05fd4e5dcbb589def9270038f39957489fe982c2b723713", "aliases": [ "CVE-2010-0742", "VC-OPENSSL-20100601-CVE-2010-0742" @@ -1852,7 +1852,7 @@ "weaknesses": [] }, { - "unique_content_id": "bfee13b4a1f7df094ab9f172cf3556c9", + "unique_content_id": "806fa09aede3c5095c3bf55d4973cc6160bf7786a6efe3201815ceeb30cccf2b", "aliases": [ "CVE-2010-1633", "VC-OPENSSL-20100601-CVE-2010-1633" @@ -1890,7 +1890,7 @@ "weaknesses": [] }, { - "unique_content_id": "76c3ba83fe766ac2a084b0bd3de847f5", + "unique_content_id": "7b65ee41c2d48ae2fc3ab1c1935814347695def01407b8da246cab5018fd4f01", "aliases": [ "CVE-2010-3864", "VC-OPENSSL-20101116-CVE-2010-3864" @@ -1940,7 +1940,7 @@ "weaknesses": [] }, { - "unique_content_id": "316f2dc208adb956396af86e8d35c818", + "unique_content_id": "e0c32279e2afef8a7c959758dd603e340e8b3ae83744f2af395802b4d7152546", "aliases": [ "CVE-2010-4180", "VC-OPENSSL-20101202-CVE-2010-4180" @@ -1990,7 +1990,7 @@ "weaknesses": [] }, { - "unique_content_id": "a65500a311ab1c4e556fa47df1b487e1", + "unique_content_id": "93fa5cf53d6cabf247c30a66821d9a5e07a1013f64a2417d5e26ac28581c4301", "aliases": [ "CVE-2010-4252", "VC-OPENSSL-20101202-CVE-2010-4252" @@ -2028,7 +2028,7 @@ "weaknesses": [] }, { - "unique_content_id": "5c73df85af33b3649d0f8f5cf48465d3", + "unique_content_id": "23f38bdcf51ed382203722a20b7d4821569824f9d019c122bf958aa76dd50613", "aliases": [ "CVE-2010-5298", "VC-OPENSSL-20140408-CVE-2010-5298" @@ -2078,7 +2078,7 @@ "weaknesses": [] }, { - "unique_content_id": "4d2690fa788437a1517d397eabb14249", + 
"unique_content_id": "b9846e705257211137a5d75434ca61d87844c9fae7bc25a5a943b397a57a32c2", "aliases": [ "CVE-2011-0014", "VC-OPENSSL-20110208-CVE-2011-0014" @@ -2128,7 +2128,7 @@ "weaknesses": [] }, { - "unique_content_id": "220a4682b4ef1cc32a29898f3057b9b3", + "unique_content_id": "ceda83e23c529430797c0b2affbe99cfbd68a5919628c3a8921070972ad425d3", "aliases": [ "CVE-2011-3207", "VC-OPENSSL-20110906-CVE-2011-3207" @@ -2166,7 +2166,7 @@ "weaknesses": [] }, { - "unique_content_id": "990e85544590d4e2411449cfbc182afd", + "unique_content_id": "63385b83187d8305d4b3a99688f51116e1e99e77469a4de02e39611bbc58cf10", "aliases": [ "CVE-2011-3210", "VC-OPENSSL-20110906-CVE-2011-3210" @@ -2216,7 +2216,7 @@ "weaknesses": [] }, { - "unique_content_id": "6a353734271d92996f12a08fde03f7bb", + "unique_content_id": "392d936885fcdae2fb2b4200be4c4dbe8cb7fef88164723777c37de37b84d573", "aliases": [ "CVE-2011-4108", "VC-OPENSSL-20120104-CVE-2011-4108" @@ -2266,7 +2266,7 @@ "weaknesses": [] }, { - "unique_content_id": "5459b1f4a775b3122cdb0ec3ad815b3d", + "unique_content_id": "617f7a0525e9e761eae4eb9c93e690fabebd6717a3295b104064c694207f1897", "aliases": [ "CVE-2011-4109", "VC-OPENSSL-20120104-CVE-2011-4109" @@ -2304,7 +2304,7 @@ "weaknesses": [] }, { - "unique_content_id": "85e39cd316fb40cbdc47d19d1f93fade", + "unique_content_id": "985ab2093b4bed8444751c8a5f106add9b1f71fefbe400f56ff4a34d7fc29d00", "aliases": [ "CVE-2011-4576", "VC-OPENSSL-20120104-CVE-2011-4576" @@ -2354,7 +2354,7 @@ "weaknesses": [] }, { - "unique_content_id": "578281e8060ac1dc67b9d229e4b003ab", + "unique_content_id": "a75d293b72e75c3618655c718811f59a039e176e1592a13e7fc6a723dd4003d6", "aliases": [ "CVE-2011-4577", "VC-OPENSSL-20120104-CVE-2011-4577" @@ -2404,7 +2404,7 @@ "weaknesses": [] }, { - "unique_content_id": "48361e01b38b28352705c300f7ee407b", + "unique_content_id": "c10f7480d6e0decea7f1d9b9884ea97b04025caa0c39bbc1338955d9ac46b48d", "aliases": [ "CVE-2011-4619", "VC-OPENSSL-20120104-CVE-2011-4619" @@ -2454,7 +2454,7 @@ 
"weaknesses": [] }, { - "unique_content_id": "9c9b9e8b9a5f1a355656382f71722432", + "unique_content_id": "b98fd56170c94c5fe71a1823c88ad50a789a513aa656f1cef217a11c83d645b7", "aliases": [ "CVE-2012-0027", "VC-OPENSSL-20120104-CVE-2012-0027" @@ -2492,7 +2492,7 @@ "weaknesses": [] }, { - "unique_content_id": "5af91d2aece046ccf3bc688d3dff09d5", + "unique_content_id": "525a3a5ff9914fd1388fdd071f143b794e6c642f2e45beb7d7d0bc49a78057a3", "aliases": [ "CVE-2012-0050", "VC-OPENSSL-20120104-CVE-2012-0050" @@ -2542,7 +2542,7 @@ "weaknesses": [] }, { - "unique_content_id": "6744bf2a3fd6eba6b18d59fb648d443b", + "unique_content_id": "c60189dfbd7ddb73a1d2a470d59fa6fcb7bedad776a8d717a0cbca7d3b416095", "aliases": [ "CVE-2012-0884", "VC-OPENSSL-20120312-CVE-2012-0884" @@ -2592,7 +2592,7 @@ "weaknesses": [] }, { - "unique_content_id": "c4e836c345751d38a3bff43c10e5a655", + "unique_content_id": "9d1e4715f7138b1a78fbf5251551b5d200ccd9ec52515b1b2939757df362997b", "aliases": [ "CVE-2012-2110", "VC-OPENSSL-20120419-CVE-2012-2110" @@ -2654,7 +2654,7 @@ "weaknesses": [] }, { - "unique_content_id": "7451866670acf0bd4a5f0c9d74bdfb18", + "unique_content_id": "ea921fcdf273dfa8a452dab36604e137574b2bd9234e81b08a4885a267939e64", "aliases": [ "CVE-2012-2131", "VC-OPENSSL-20120424-CVE-2012-2131" @@ -2692,7 +2692,7 @@ "weaknesses": [] }, { - "unique_content_id": "9a9efe32bb6fb903c9814b808b7f0206", + "unique_content_id": "0fd2dc9500a45c761c7a6ddadcaca6403b0dcaefd25ec7c8a9a2e4dba0211efe", "aliases": [ "CVE-2012-2333", "VC-OPENSSL-20120510-CVE-2012-2333" @@ -2754,7 +2754,7 @@ "weaknesses": [] }, { - "unique_content_id": "9c755e2b9ac36e9d77e7aa63ca6b91e5", + "unique_content_id": "37fd821acfb83d5e24554010a0319b02b5c7c1c552d4dba2918bb1047836ed2c", "aliases": [ "CVE-2012-2686", "VC-OPENSSL-20130205-CVE-2012-2686" @@ -2792,7 +2792,7 @@ "weaknesses": [] }, { - "unique_content_id": "084bb9ad1da9dafc260f041cfdaf868e", + "unique_content_id": "fcd18f8ddd7c4c680932ce9d21da72cd35ad71fe163ce5734f136cf4d1913002", 
"aliases": [ "CVE-2013-0166", "VC-OPENSSL-20130205-CVE-2013-0166" @@ -2854,7 +2854,7 @@ "weaknesses": [] }, { - "unique_content_id": "d1003ac6fdcb1a2a4d7bca936e239b42", + "unique_content_id": "274bafa8474e5913afcb27cc6ffde809fb6f6ba505f13df3234f8ee946e218ee", "aliases": [ "CVE-2013-0169", "VC-OPENSSL-20130204-CVE-2013-0169" @@ -2916,7 +2916,7 @@ "weaknesses": [] }, { - "unique_content_id": "0e3a3a12e8060b9395fe7b48a7276377", + "unique_content_id": "98a0e5556bb1bf1ef2d84156a75154a169ffed9e73af5bedc7e7d76c7e2dda3c", "aliases": [ "CVE-2013-4353", "VC-OPENSSL-20140106-CVE-2013-4353" @@ -2954,7 +2954,7 @@ "weaknesses": [] }, { - "unique_content_id": "bd7c16b098a35e13b1659e8c4934253d", + "unique_content_id": "7628f9cd3cb03285c9bfdbb9b7dc222f54c2e5ae9498bce55eb751f6dfce660d", "aliases": [ "CVE-2013-6449", "VC-OPENSSL-20131214-CVE-2013-6449" @@ -2992,7 +2992,7 @@ "weaknesses": [] }, { - "unique_content_id": "cd972700acea991417121019f009bac1", + "unique_content_id": "bc2e1522ce53f1d9658df6561b069413fd1a1e237b8d127da67a245315e1763f", "aliases": [ "CVE-2013-6450", "VC-OPENSSL-20131213-CVE-2013-6450" @@ -3042,7 +3042,7 @@ "weaknesses": [] }, { - "unique_content_id": "5d7762928fe0665ff593f8b93f0f7c2d", + "unique_content_id": "84057cab1e58fea9c99a32830b1f9459f608e4a1842a5c621e56d7570923cad5", "aliases": [ "CVE-2014-0076", "VC-OPENSSL-20140214-CVE-2014-0076" @@ -3116,7 +3116,7 @@ "weaknesses": [] }, { - "unique_content_id": "3b9f07c3f3fc9a3177b7cba6994626f2", + "unique_content_id": "757f04cde75470cb2bec8053f5fc874a82bae6b35945ec483df2e28eeb0cfc78", "aliases": [ "CVE-2014-0160", "VC-OPENSSL-20140407-CVE-2014-0160" @@ -3154,7 +3154,7 @@ "weaknesses": [] }, { - "unique_content_id": "da21b7edec2a01bd2495586e3e344a2c", + "unique_content_id": "e09b36d835f2209f6be06a5138c917c4210c32191bef1c9dc5a2faa1f8850e32", "aliases": [ "CVE-2014-0195", "VC-OPENSSL-20140605-CVE-2014-0195" @@ -3216,7 +3216,7 @@ "weaknesses": [] }, { - "unique_content_id": "ee4174c785ef4de123c8f5c8c4fbf9b2", + 
"unique_content_id": "156f765a217953dbd4da2ecb89c9f1998f67752ff9a12bbb575d396f7f8902a2", "aliases": [ "CVE-2014-0198", "VC-OPENSSL-20140421-CVE-2014-0198" @@ -3266,7 +3266,7 @@ "weaknesses": [] }, { - "unique_content_id": "bc12de8c2221021ccb7c3659b08cd3f5", + "unique_content_id": "1220fb598061d81d0d92e10093d9cf1e9de722b48ce1e08513ff839410106623", "aliases": [ "CVE-2014-0221", "VC-OPENSSL-20140605-CVE-2014-0221" @@ -3328,7 +3328,7 @@ "weaknesses": [] }, { - "unique_content_id": "a73f61be805e75d9468e11afb3158d45", + "unique_content_id": "8e650cb3afbf00bdf5312f07bb03de889b8709b53e66779f3ff6664d49f060cb", "aliases": [ "CVE-2014-0224", "VC-OPENSSL-20140605-CVE-2014-0224" @@ -3390,7 +3390,7 @@ "weaknesses": [] }, { - "unique_content_id": "4e8f724565b6429137ea959defa72090", + "unique_content_id": "3af893757d5d17f3214542da1f1511d519cfbcda8bc5691a205aadb469f130f3", "aliases": [ "CVE-2014-3470", "VC-OPENSSL-20140530-CVE-2014-3470" @@ -3452,7 +3452,7 @@ "weaknesses": [] }, { - "unique_content_id": "913ba8a6e88c02283428f89a6d24952b", + "unique_content_id": "73269c9023356431d683604381118286fb8aeddcd87d0151e488b7255fa89f2c", "aliases": [ "CVE-2014-3505", "VC-OPENSSL-20140806-CVE-2014-3505" @@ -3514,7 +3514,7 @@ "weaknesses": [] }, { - "unique_content_id": "27f89a41dfab2654a12a2d701b68ad9c", + "unique_content_id": "c96902798094fef86133d6163da3a0ef8e16161941fb0c9987451c3856334da2", "aliases": [ "CVE-2014-3506", "VC-OPENSSL-20140806-CVE-2014-3506" @@ -3576,7 +3576,7 @@ "weaknesses": [] }, { - "unique_content_id": "48ecff4dbadf3f99198fcfb4138048d8", + "unique_content_id": "2947a778fbea64d8f99d370af3a8d0169602ff5adff88d86ccf57a09c3fb556c", "aliases": [ "CVE-2014-3507", "VC-OPENSSL-20140806-CVE-2014-3507" @@ -3638,7 +3638,7 @@ "weaknesses": [] }, { - "unique_content_id": "e1e9269594db16c804a566e20f436cd2", + "unique_content_id": "e111b3c925ff4930bf9df47e3c68ad219bfc78029011f026f5db9dfcb3623cba", "aliases": [ "CVE-2014-3508", "VC-OPENSSL-20140806-CVE-2014-3508" @@ -3700,7 +3700,7 @@ 
"weaknesses": [] }, { - "unique_content_id": "db4e7a865c812a2f137555357a4ea54a", + "unique_content_id": "d999097e03330b37701e1a362f85711c43272b4fc8606896b221ff2c09a6f5cb", "aliases": [ "CVE-2014-3509", "VC-OPENSSL-20140806-CVE-2014-3509" @@ -3750,7 +3750,7 @@ "weaknesses": [] }, { - "unique_content_id": "073034548d58e9674b4080cd0c36f8cb", + "unique_content_id": "f3294bb2b90c0dac71eb21010721728aa9fbaf64cd7b1aff3bbe97099e5db16e", "aliases": [ "CVE-2014-3510", "VC-OPENSSL-20140806-CVE-2014-3510" @@ -3812,7 +3812,7 @@ "weaknesses": [] }, { - "unique_content_id": "a5d66943f85ab01f18b1181d5dccceb3", + "unique_content_id": "99661f6b61c2befbf0a840ac395f67ae171810c041a0400837c4e202fff1c6ef", "aliases": [ "CVE-2014-3511", "VC-OPENSSL-20140806-CVE-2014-3511" @@ -3850,7 +3850,7 @@ "weaknesses": [] }, { - "unique_content_id": "4c289b7168ed3ac1dc649dd94e296ee2", + "unique_content_id": "52bc0907465cbad85c1cf82eecf18885bbbe24de573bda4cdb9f8367f269a783", "aliases": [ "CVE-2014-3512", "VC-OPENSSL-20140806-CVE-2014-3512" @@ -3888,7 +3888,7 @@ "weaknesses": [] }, { - "unique_content_id": "3dbf91d5443471c2da6cf221eddf9898", + "unique_content_id": "c54c6fed589f1ca8024f1917126aae2983baa39871610960f380e5340ce50252", "aliases": [ "CVE-2014-3513", "VC-OPENSSL-20141015-CVE-2014-3513" @@ -3932,7 +3932,7 @@ "weaknesses": [] }, { - "unique_content_id": "63bf7bb20dcd1c7a3214c025ea53c1da", + "unique_content_id": "dcced98f8929707dec2045556ad27a5f407f0a6da5b0de6bb9cb0bf6c4eba16c", "aliases": [ "CVE-2014-3567", "VC-OPENSSL-20141015-CVE-2014-3567" @@ -4000,7 +4000,7 @@ "weaknesses": [] }, { - "unique_content_id": "ba13f3aea682e9e1c5fab3672da07088", + "unique_content_id": "0441ee6483168f14e3eb89495aa9144a146935020e60b4fafdd5de9dc52fbb05", "aliases": [ "CVE-2014-3568", "VC-OPENSSL-20141015-CVE-2014-3568" @@ -4068,7 +4068,7 @@ "weaknesses": [] }, { - "unique_content_id": "d615f85fc740c95b6b98e150b56d1ae3", + "unique_content_id": "753342c985991295f308ceffe0455636ac19375dc81d8e311fa5cf1d23473dd5", 
"aliases": [ "CVE-2014-3569", "VC-OPENSSL-20141021-CVE-2014-3569" @@ -4136,7 +4136,7 @@ "weaknesses": [] }, { - "unique_content_id": "f2f9de1344eacac2f17f6642b9655651", + "unique_content_id": "4247eafd0646ef018955aac7a30d2c023512a5b5f3a1803427473090a57766e5", "aliases": [ "CVE-2014-3570", "VC-OPENSSL-20150108-CVE-2014-3570" @@ -4204,7 +4204,7 @@ "weaknesses": [] }, { - "unique_content_id": "b80715d645997362b4be69a335b46cd5", + "unique_content_id": "bf213d08073d8ca6d471398fe7b23b4ee5111732d9f7976e6ed6740944653e2d", "aliases": [ "CVE-2014-3571", "VC-OPENSSL-20150105-CVE-2014-3571" @@ -4272,7 +4272,7 @@ "weaknesses": [] }, { - "unique_content_id": "16d87492de289b2cbfd7ba3ef7e106fc", + "unique_content_id": "38ffe37c3e05fc10c74d621c2a23b78e2b3238c88a8cec376705a04e80131162", "aliases": [ "CVE-2014-3572", "VC-OPENSSL-20150105-CVE-2014-3572" @@ -4340,7 +4340,7 @@ "weaknesses": [] }, { - "unique_content_id": "cca76ec7e4ca1da60dc37bfb7065a74d", + "unique_content_id": "173da4e79bb96a760519a18feb9667b22c727def897afd7cab56b2fc840ff141", "aliases": [ "CVE-2014-5139", "VC-OPENSSL-20140806-CVE-2014-5139" @@ -4378,7 +4378,7 @@ "weaknesses": [] }, { - "unique_content_id": "1d42619f9d572e6c6f831da1d4b5347c", + "unique_content_id": "c6cf5f33fdcc803e66a88537cf41831c6b88ce19c6c320843d5c59c63c148c83", "aliases": [ "CVE-2014-8176", "VC-OPENSSL-20150611-CVE-2014-8176" @@ -4446,7 +4446,7 @@ "weaknesses": [] }, { - "unique_content_id": "3f5c428c988da21fcf75625d7764c31e", + "unique_content_id": "a7261d54aab29faf70f12bbfbdd3f3e78cf2beebfeb915dbd7a29714a8955fed", "aliases": [ "CVE-2014-8275", "VC-OPENSSL-20150105-CVE-2014-8275" @@ -4514,7 +4514,7 @@ "weaknesses": [] }, { - "unique_content_id": "ecbce64df0cdd160db419c6db1cd9dc4", + "unique_content_id": "60f12268a60e39fd28c928e89af1f4038210aff7ad1f1fd748b8968ca65dfbdd", "aliases": [ "CVE-2015-0204", "VC-OPENSSL-20150106-CVE-2015-0204" @@ -4582,7 +4582,7 @@ "weaknesses": [] }, { - "unique_content_id": "14a72a501af8865388558895f94f4719", + 
"unique_content_id": "f5bac2344614e13386f702b70ba31694e5db10133151e5372c410e6fbff702ca", "aliases": [ "CVE-2015-0205", "VC-OPENSSL-20150108-CVE-2015-0205" @@ -4638,7 +4638,7 @@ "weaknesses": [] }, { - "unique_content_id": "ae55e9f4f7210581875a2de83cc058ec", + "unique_content_id": "f94fa5bd638308939b95d4d520dd8e57678c3f4709d63229a75fe3868c15446d", "aliases": [ "CVE-2015-0206", "VC-OPENSSL-20150108-CVE-2015-0206" @@ -4694,7 +4694,7 @@ "weaknesses": [] }, { - "unique_content_id": "66636a0c48ff0f39676cc43ff2fad975", + "unique_content_id": "45236a4d12fbe78a8b2d6a428b53a890bddcc1dedee31cb6d41b20af54e9bbb3", "aliases": [ "CVE-2015-0207", "VC-OPENSSL-20150319-CVE-2015-0207" @@ -4738,7 +4738,7 @@ "weaknesses": [] }, { - "unique_content_id": "5d5cb3ddc2d7d372e96fc9e7eb0e6172", + "unique_content_id": "774c0aaa394ae3ac59c32105e791fec7c71c602f342a042afd485ed819983fc6", "aliases": [ "CVE-2015-0208", "VC-OPENSSL-20150319-CVE-2015-0208" @@ -4782,7 +4782,7 @@ "weaknesses": [] }, { - "unique_content_id": "b91a75f67326a148c90e6ad45ba11839", + "unique_content_id": "f53c9570c9efdac69f3e8300699223b0497562e4c9fb9398fdf2f29ba05efb53", "aliases": [ "CVE-2015-0209", "VC-OPENSSL-20150319-CVE-2015-0209" @@ -4862,7 +4862,7 @@ "weaknesses": [] }, { - "unique_content_id": "92852e9f71e2d4220063d01c7e871d0f", + "unique_content_id": "ae10e26137b18ce0f074a9e88ad800799cfa131e4c9075c49e5cd736bd4ae7ef", "aliases": [ "CVE-2015-0285", "VC-OPENSSL-20150310-CVE-2015-0285" @@ -4906,7 +4906,7 @@ "weaknesses": [] }, { - "unique_content_id": "d0946aba30cf839fdbc468685b6bd683", + "unique_content_id": "fcff8a052ccf49c48dbe7f8d5a88a485de1a213799585647c2124b98ae5ccb52", "aliases": [ "CVE-2015-0286", "VC-OPENSSL-20150319-CVE-2015-0286" @@ -4986,7 +4986,7 @@ "weaknesses": [] }, { - "unique_content_id": "7f14c539a7b1d7b62b178e81a164ca57", + "unique_content_id": "3964ca62faf5fd2df7ebf079fc420e480621026d27cc10f9de31e2738a05936c", "aliases": [ "CVE-2015-0287", "VC-OPENSSL-20150319-CVE-2015-0287" @@ -5066,7 +5066,7 @@ 
"weaknesses": [] }, { - "unique_content_id": "fde824bdb24f286066693f15a53a9c11", + "unique_content_id": "751dcb76349de0d4bd85b5a27c52b97bf0f472fc1bd4b3a334c67afd762a0bf1", "aliases": [ "CVE-2015-0288", "VC-OPENSSL-20150302-CVE-2015-0288" @@ -5146,7 +5146,7 @@ "weaknesses": [] }, { - "unique_content_id": "a6996bfe711e793b22ceb3d47c975099", + "unique_content_id": "66bbf7b524be1160d1805c966d32418f2dd42b204296d6b885939dafb1ce52f5", "aliases": [ "CVE-2015-0289", "VC-OPENSSL-20150319-CVE-2015-0289" @@ -5226,7 +5226,7 @@ "weaknesses": [] }, { - "unique_content_id": "8c8ab1d205efac4fa9eeb6888a73d02b", + "unique_content_id": "23b9fdf2b9a73946210388721d4df0de3a020ac58b1e6669c3696b33a602ec98", "aliases": [ "CVE-2015-0290", "VC-OPENSSL-20150319-CVE-2015-0290" @@ -5270,7 +5270,7 @@ "weaknesses": [] }, { - "unique_content_id": "9c790e8e82381b71bd62ae5a2403aa43", + "unique_content_id": "6bdb68d814ff5f69711b93446eb25ddf133d6fbb35bab358bb97b3c423bb5811", "aliases": [ "CVE-2015-0291", "VC-OPENSSL-20150319-CVE-2015-0291" @@ -5314,7 +5314,7 @@ "weaknesses": [] }, { - "unique_content_id": "037837042ea4921162841a8a572dedb7", + "unique_content_id": "9928809c0f0a04e7ae6a89ccefdce3eb83e34e047f3470f7778b42182c3b0a3e", "aliases": [ "CVE-2015-0292", "VC-OPENSSL-20150319-CVE-2015-0292" @@ -5382,7 +5382,7 @@ "weaknesses": [] }, { - "unique_content_id": "6b326dde327d1535193796cfd337f305", + "unique_content_id": "d86068f891546989943214dbe20bceca4d29250299395048df1666ebef7ede03", "aliases": [ "CVE-2015-0293", "VC-OPENSSL-20150319-CVE-2015-0293" @@ -5462,7 +5462,7 @@ "weaknesses": [] }, { - "unique_content_id": "87c491358b43983d41be3e34f577787f", + "unique_content_id": "e2e31fceb4d827820c9a6c2c0144827a16d464ad33bd6139cf5e5c7389864c4c", "aliases": [ "CVE-2015-1787", "VC-OPENSSL-20150319-CVE-2015-1787" @@ -5506,7 +5506,7 @@ "weaknesses": [] }, { - "unique_content_id": "742341fd7596524c221d7ac8aa8025de", + "unique_content_id": "be2ba7ab66a7f53457702397f237a1894566b9d27e7d776969c121a98b0b48c3", 
"aliases": [ "CVE-2015-1788", "VC-OPENSSL-20150611-CVE-2015-1788" @@ -5586,7 +5586,7 @@ "weaknesses": [] }, { - "unique_content_id": "154f6f04f63ee6fba925180ed9e059c1", + "unique_content_id": "6693aa99959f40abe75da63ee98844b32d6c80ee49cd880d7211f82f39bff9bf", "aliases": [ "CVE-2015-1789", "VC-OPENSSL-20150611-CVE-2015-1789" @@ -5666,7 +5666,7 @@ "weaknesses": [] }, { - "unique_content_id": "2b988a60b7d38da17ad12c1d84455a70", + "unique_content_id": "0d6ca333ae5301c543aa3d5fee659526e6e7df19d6cd23503b080d44f393be29", "aliases": [ "CVE-2015-1790", "VC-OPENSSL-20150611-CVE-2015-1790" @@ -5746,7 +5746,7 @@ "weaknesses": [] }, { - "unique_content_id": "c9cffc6fc71a28da39de00bca06f0ce3", + "unique_content_id": "2ad006bcecf434794b6cafb90c3e60eda8f3465baf0d60adf2eb0547f6075427", "aliases": [ "CVE-2015-1791", "VC-OPENSSL-20150602-CVE-2015-1791" @@ -5826,7 +5826,7 @@ "weaknesses": [] }, { - "unique_content_id": "303206c390cb78e168c8425d3c6d2c91", + "unique_content_id": "fbbe723124334c66dbc53652a1a157264900e602a74ed731a2223c212d189f15", "aliases": [ "CVE-2015-1792", "VC-OPENSSL-20150611-CVE-2015-1792" @@ -5906,7 +5906,7 @@ "weaknesses": [] }, { - "unique_content_id": "78795bf94381c0a1772ed444fb576c91", + "unique_content_id": "77f031f81329fda29782191d97de2003d3b4fadda5cae0ddf20bcd4ba0958c6e", "aliases": [ "CVE-2015-1793", "VC-OPENSSL-20150709-CVE-2015-1793" @@ -5962,7 +5962,7 @@ "weaknesses": [] }, { - "unique_content_id": "34e7fc0f12a532fb0e3f133767651b82", + "unique_content_id": "6b2da461b684884127216718edee478e331e8b64439b5c98f36f9284ead68922", "aliases": [ "CVE-2015-1794", "VC-OPENSSL-20150811-CVE-2015-1794" @@ -6006,7 +6006,7 @@ "weaknesses": [] }, { - "unique_content_id": "c49999301ee8aa01a9ddd428979f0bc4", + "unique_content_id": "7c4c38c81c872cfcb7ae77bc45b1a78760ddda5aa1ebf6e061d41443c7a0870a", "aliases": [ "CVE-2015-3193", "VC-OPENSSL-20151203-CVE-2015-3193" @@ -6050,7 +6050,7 @@ "weaknesses": [] }, { - "unique_content_id": "3c8cc92c8be75ecbbf22aa5caa33bfa9", + 
"unique_content_id": "826e677a591d0d5e808454bc70f127fab4629f8ee5c2f16bc03c03740fc52661", "aliases": [ "CVE-2015-3194", "VC-OPENSSL-20151203-CVE-2015-3194" @@ -6106,7 +6106,7 @@ "weaknesses": [] }, { - "unique_content_id": "aa54b531fb7b90075a099e3d74098089", + "unique_content_id": "7eedb5c223cb23e47aa9ce69cf53869fc975e43e734731737790f5355c57c46f", "aliases": [ "CVE-2015-3195", "VC-OPENSSL-20151203-CVE-2015-3195" @@ -6186,7 +6186,7 @@ "weaknesses": [] }, { - "unique_content_id": "58623263c1b67d72553e0282afd5d03a", + "unique_content_id": "434c0477ef7b438f9b58ddb4cf5d072f24523f7231cf77fb3d492dc0ae358d03", "aliases": [ "CVE-2015-3196", "VC-OPENSSL-20151203-CVE-2015-3196" @@ -6254,7 +6254,7 @@ "weaknesses": [] }, { - "unique_content_id": "a1b7aec7c53c8018f9f0fc9118de71b4", + "unique_content_id": "582bb190d8800ea86907f44769c22a29e8f34079c0b2ce5e09052db99707480b", "aliases": [ "CVE-2015-3197", "VC-OPENSSL-20160128-CVE-2015-3197" @@ -6310,7 +6310,7 @@ "weaknesses": [] }, { - "unique_content_id": "bb0ba32b691bb5c4273824bad2f457a9", + "unique_content_id": "202a2aec8d017aab9c615cfdaf94d9a7137c18c8e41c2d999025759310199b81", "aliases": [ "CVE-2016-0701", "VC-OPENSSL-20160128-CVE-2016-0701" @@ -6354,7 +6354,7 @@ "weaknesses": [] }, { - "unique_content_id": "5115d9fca6da89c0f09b18c66063043e", + "unique_content_id": "c2f87f5ea625ae3e87ab3a3ec82e47995b16601835ef7be500414932928f3c69", "aliases": [ "CVE-2016-0702", "VC-OPENSSL-20160301-CVE-2016-0702" @@ -6410,7 +6410,7 @@ "weaknesses": [] }, { - "unique_content_id": "356419ba58928dd92651de3bd8726759", + "unique_content_id": "0dc285b8adde395581c94e422ef09ae80752d8b5b7e8177bee2bd05a9044f07c", "aliases": [ "CVE-2016-0703", "VC-OPENSSL-20160301-CVE-2016-0703" @@ -6490,7 +6490,7 @@ "weaknesses": [] }, { - "unique_content_id": "ca04670f15a036f2d20611d996b2e03d", + "unique_content_id": "e7bba3f95fb4b39e7b5f6a6297935e8cfcadbbabda552ee1b06e65e9282ab672", "aliases": [ "CVE-2016-0704", "VC-OPENSSL-20160301-CVE-2016-0704" @@ -6570,7 +6570,7 @@ 
"weaknesses": [] }, { - "unique_content_id": "1e32ac05e706f05b60d0c367814faf5b", + "unique_content_id": "8f5d81b6201854025eba1228dc3dbb1562bdefdd101afb131581d6c49722d872", "aliases": [ "CVE-2016-0705", "VC-OPENSSL-20160301-CVE-2016-0705" @@ -6626,7 +6626,7 @@ "weaknesses": [] }, { - "unique_content_id": "dcfad5e453c456b47b7dcb85f3bbf948", + "unique_content_id": "54d1b0ccbb4b663c9a43e3d2a6be131b6b5a0413fbb5f22cee822ed6936d94fe", "aliases": [ "CVE-2016-0797", "VC-OPENSSL-20160301-CVE-2016-0797" @@ -6682,7 +6682,7 @@ "weaknesses": [] }, { - "unique_content_id": "56718964514021ad2571d5e9bb4e1ba9", + "unique_content_id": "5cee408201ad50518a04c7597ae547e01069a3e3a71411bb5d03665d395c9c3f", "aliases": [ "CVE-2016-0798", "VC-OPENSSL-20160301-CVE-2016-0798" @@ -6738,7 +6738,7 @@ "weaknesses": [] }, { - "unique_content_id": "65ffc54cdd6e37ee324ff207835500d6", + "unique_content_id": "84fcbdaee2028d10d0a154f4562e0212135d6cce3bfd9eda6b933c8e302f6351", "aliases": [ "CVE-2016-0799", "VC-OPENSSL-20160301-CVE-2016-0799" @@ -6794,7 +6794,7 @@ "weaknesses": [] }, { - "unique_content_id": "4f983dc0849c0739895c99ff8042ef0f", + "unique_content_id": "3bb968a563522f059f423c3561dfed5d17a6d5c4d6bd3d1715133146dcc94142", "aliases": [ "CVE-2016-0800", "VC-OPENSSL-20160301-CVE-2016-0800" @@ -6850,7 +6850,7 @@ "weaknesses": [] }, { - "unique_content_id": "80621d002083a0f1c1d9267b2575c2af", + "unique_content_id": "3eadfec35b5b88ba68ecb0d97d2cba4203556ca8be6c28566ff28d045dbeeaba", "aliases": [ "CVE-2016-2105", "VC-OPENSSL-20160503-CVE-2016-2105" @@ -6906,7 +6906,7 @@ "weaknesses": [] }, { - "unique_content_id": "6646efbc2c3440a5aaedd5479df16fe0", + "unique_content_id": "fb73586b842fb010ced45dc708d8346e3aded542fe78c11f03f83bf754997edd", "aliases": [ "CVE-2016-2106", "VC-OPENSSL-20160503-CVE-2016-2106" @@ -6962,7 +6962,7 @@ "weaknesses": [] }, { - "unique_content_id": "eaa2fce419eaf5b4ea668e9106c1fd43", + "unique_content_id": "45c33cd5992b2f757ade809ec1b55e35aed7fa0d57bb8b46c8f7ab46d4cf5d81", 
"aliases": [ "CVE-2016-2107", "VC-OPENSSL-20160503-CVE-2016-2107" @@ -7024,7 +7024,7 @@ "weaknesses": [] }, { - "unique_content_id": "eadc3ef5343caffdb16fc7a845983d99", + "unique_content_id": "2455be4d3319416de5807835a5e13cb7a40a862fcb21503efe33c9b0836132bc", "aliases": [ "CVE-2016-2108", "VC-OPENSSL-20160503-CVE-2016-2108" @@ -7080,7 +7080,7 @@ "weaknesses": [] }, { - "unique_content_id": "0d33c0311add27a6e1a49d7a3d965c38", + "unique_content_id": "86acb94d7c04bbcbd8c25c43ae292bd04c94a03e34fdc267053638c248e0b7f3", "aliases": [ "CVE-2016-2109", "VC-OPENSSL-20160503-CVE-2016-2109" @@ -7136,7 +7136,7 @@ "weaknesses": [] }, { - "unique_content_id": "9448f7ccc33194fa36bbdb2f40e749b2", + "unique_content_id": "79d98ea5b970167fc32b2dd513af82c8f21fc88f5863ff80c40bf92a86567dc8", "aliases": [ "CVE-2016-2176", "VC-OPENSSL-20160503-CVE-2016-2176" @@ -7192,7 +7192,7 @@ "weaknesses": [] }, { - "unique_content_id": "4c10365eacf49048d2ca1f3d490de4c2", + "unique_content_id": "7cf78e4965685dd994d47b5b4648c8671c19b75994e4b56ad143255738c4b716", "aliases": [ "CVE-2016-2177", "VC-OPENSSL-20160601-CVE-2016-2177" @@ -7248,7 +7248,7 @@ "weaknesses": [] }, { - "unique_content_id": "69c98b0d04f2bf1a2d1f044b54108625", + "unique_content_id": "41d1b686cc25b51e538b1294c03f9bd49194604c0a5b1878a85ef935c82f0573", "aliases": [ "CVE-2016-2178", "VC-OPENSSL-20160607-CVE-2016-2178" @@ -7304,7 +7304,7 @@ "weaknesses": [] }, { - "unique_content_id": "c541cb508cce45e8ffa33b03c44a7706", + "unique_content_id": "24d135d43dac5961bd8e824a6be06bf737548a27b6908a9bdb06c4cf6be7da66", "aliases": [ "CVE-2016-2179", "VC-OPENSSL-20160822-CVE-2016-2179" @@ -7372,7 +7372,7 @@ "weaknesses": [] }, { - "unique_content_id": "c3ef560f8d241b1b75cdef3199faa45c", + "unique_content_id": "39ea5c947d194650d344e5adcd4353a31075fe76556175678092991fef56935a", "aliases": [ "CVE-2016-2180", "VC-OPENSSL-20160722-CVE-2016-2180" @@ -7428,7 +7428,7 @@ "weaknesses": [] }, { - "unique_content_id": "bad085048774b51abab2b4e37c3868a0", + 
"unique_content_id": "2dcc23a57bd50008fd1ff1dd5ab1e15ac70d58a3b621d70b039cee4339be5439", "aliases": [ "CVE-2016-2181", "VC-OPENSSL-20160819-CVE-2016-2181" @@ -7496,7 +7496,7 @@ "weaknesses": [] }, { - "unique_content_id": "e29c5c80d781403086304ecb4fce7a59", + "unique_content_id": "fcd1d51451689926072528e12c9206f1c13c61ed97e42c132b0667ea48870171", "aliases": [ "CVE-2016-2182", "VC-OPENSSL-20160816-CVE-2016-2182" @@ -7552,12 +7552,12 @@ "weaknesses": [] }, { - "unique_content_id": "659c848c83841e30d1052e8d49e18051", + "unique_content_id": "d87e634ab174d154043776ba4b3c6659d5f37175726b216710c42ec5144d3d95", "aliases": [ "CVE-2016-2183", "VC-OPENSSL-20160824-CVE-2016-2183" ], - "summary": "Because DES (and triple-DES) has only a 64-bit block size, birthday attacks are a real concern. For example, with the ability to run Javascript in a browser, it is possible to send enough traffic to cause a collision, and then use that information to recover something like a session Cookie. Triple-DES, which shows up as “DES-CBC3” in an OpenSSL cipher string, is still used on the Web, and major browsers are not yet willing to completely disable it. If you run a server, you should disable triple-DES. This is generally a configuration issue. If you run an old server that doesn’t support any better ciphers than DES or RC4, you should upgrade. For 1.0.2 and 1.0.1, we removed the triple-DES ciphers from the “HIGH” keyword and put them into “MEDIUM.” Note that we did not remove them from the “DEFAULT” keyword. For the 1.1.0 release, we treat triple-DES just like we are treating RC4. It is not compiled by default; you have to use “enable-weak-ssl-ciphers” as a config option. Even when those ciphers are compiled, triple-DES is only in the “MEDIUM” keyword. In addition we also removed it from the “DEFAULT” keyword.", + "summary": "Because DES (and triple-DES) has only a 64-bit block size, birthday attacks are a real concern. 
For example, with the ability to run Javascript in a browser, it is possible to send enough traffic to cause a collision, and then use that information to recover something like a session Cookie. Triple-DES, which shows up as \u201cDES-CBC3\u201d in an OpenSSL cipher string, is still used on the Web, and major browsers are not yet willing to completely disable it. If you run a server, you should disable triple-DES. This is generally a configuration issue. If you run an old server that doesn\u2019t support any better ciphers than DES or RC4, you should upgrade. For 1.0.2 and 1.0.1, we removed the triple-DES ciphers from the \u201cHIGH\u201d keyword and put them into \u201cMEDIUM.\u201d Note that we did not remove them from the \u201cDEFAULT\u201d keyword. For the 1.1.0 release, we treat triple-DES just like we are treating RC4. It is not compiled by default; you have to use \u201cenable-weak-ssl-ciphers\u201d as a config option. Even when those ciphers are compiled, triple-DES is only in the \u201cMEDIUM\u201d keyword. 
In addition we also removed it from the \u201cDEFAULT\u201d keyword.", "affected_packages": [ { "package": { @@ -7596,7 +7596,7 @@ "weaknesses": [] }, { - "unique_content_id": "cfaace2e186847527636a2195766fc52", + "unique_content_id": "e43493ec8a73bb371bf163314718c77edbe7d72190cd2e88e09e3a65d4500cdb", "aliases": [ "CVE-2016-6302", "VC-OPENSSL-20160823-CVE-2016-6302" @@ -7664,7 +7664,7 @@ "weaknesses": [] }, { - "unique_content_id": "2af63a761bf4ddbbaeb92afa382151cf", + "unique_content_id": "c6b031581915c5cc5b42df4000da62b01be62afbba15c264e9c189aac336f855", "aliases": [ "CVE-2016-6303", "VC-OPENSSL-20160824-CVE-2016-6303" @@ -7732,7 +7732,7 @@ "weaknesses": [] }, { - "unique_content_id": "3b3ff4143b6859104d216a310d58db58", + "unique_content_id": "aef1aa2ae8685c93c4869930f90ef8cd3bcc3fbadf949e4238d182e8fd2684a0", "aliases": [ "CVE-2016-6304", "VC-OPENSSL-20160922-CVE-2016-6304" @@ -7818,7 +7818,7 @@ "weaknesses": [] }, { - "unique_content_id": "ec3000e978936c5dc59eeb71d14f61d0", + "unique_content_id": "76c39ca965fe72efb7732d5f6a6388c6f20696a7d03dcbe47c0c548a60e41ca8", "aliases": [ "CVE-2016-6305", "VC-OPENSSL-20160922-CVE-2016-6305" @@ -7868,7 +7868,7 @@ "weaknesses": [] }, { - "unique_content_id": "17585a9b090ed55460ac0cad6c3b5f6e", + "unique_content_id": "335b68ab52a7311993d1ba47eab3fb676dfaee4b10a497bd992f7cbcb13edd67", "aliases": [ "CVE-2016-6306", "VC-OPENSSL-20160921-CVE-2016-6306" @@ -7936,7 +7936,7 @@ "weaknesses": [] }, { - "unique_content_id": "f598dbb4cacf63ed93e588c1db8ff5b8", + "unique_content_id": "4eaf09765e65f727f0d87c8ee1340fb5157d0195ebc4d87f7d243b7de0540731", "aliases": [ "CVE-2016-6307", "VC-OPENSSL-20160921-CVE-2016-6307" @@ -7986,7 +7986,7 @@ "weaknesses": [] }, { - "unique_content_id": "8ee9b8d2efa51108b44de0e5f0671902", + "unique_content_id": "afa659ff3079acf90f6cbb1cc37cf907f479517ed3f41f22ff048c397dccc1e5", "aliases": [ "CVE-2016-6308", "VC-OPENSSL-20160921-CVE-2016-6308" @@ -8036,7 +8036,7 @@ "weaknesses": [] }, { - "unique_content_id": 
"ec731ec05e8399f02edc888b078cfcf1", + "unique_content_id": "2c4faeb5ab598f4bcd7363261f979466b7ea8c02e2bfa6f68c53ef466d115f77", "aliases": [ "CVE-2016-6309", "VC-OPENSSL-20160926-CVE-2016-6309" @@ -8086,7 +8086,7 @@ "weaknesses": [] }, { - "unique_content_id": "76efc0216d0391eac89b5097852a6f7e", + "unique_content_id": "3dea46bb518540bd2524894a1c99e33282ca9610f926cb1a2a6ab62fee7e9a8a", "aliases": [ "CVE-2016-7052", "VC-OPENSSL-20160926-CVE-2016-7052" @@ -8136,7 +8136,7 @@ "weaknesses": [] }, { - "unique_content_id": "ad064a076d4f4136c4ff5cc9a1c32cb4", + "unique_content_id": "ced21cf334c43c1968b1e630d0e5e466fc113b5ec477a716a9d2328d84a41e08", "aliases": [ "CVE-2016-7053", "VC-OPENSSL-20161110-CVE-2016-7053" @@ -8186,7 +8186,7 @@ "weaknesses": [] }, { - "unique_content_id": "2260cd2fea019c35edd74053d43afbfa", + "unique_content_id": "de494f6e53a555a8a467bd0841b9b26accb6e568bdb3f941b8100b02f3325224", "aliases": [ "CVE-2016-7054", "VC-OPENSSL-20161110-CVE-2016-7054" @@ -8236,7 +8236,7 @@ "weaknesses": [] }, { - "unique_content_id": "c46f2f9d6517a007f907f8a2e4c84820", + "unique_content_id": "f65656f9a1a4be03cdd849a3aa82992f4af18eae8d67af063c16232e3f59f754", "aliases": [ "CVE-2016-7055", "VC-OPENSSL-20161110-CVE-2016-7055" @@ -8304,7 +8304,7 @@ "weaknesses": [] }, { - "unique_content_id": "6f703a0f132094abbd39fd883ed6e241", + "unique_content_id": "a98bf1ba98e36233e2f7857bfdf284a1cedd8cfe0d07d9f913d8b075bee096f1", "aliases": [ "CVE-2017-3730", "VC-OPENSSL-20170126-CVE-2017-3730" @@ -8354,7 +8354,7 @@ "weaknesses": [] }, { - "unique_content_id": "c271a33647e7cdefdce8ed38c15e1bb7", + "unique_content_id": "16f408917ccdd649067c701789a6f062b284973f8da7ae2ce42116010005ffc2", "aliases": [ "CVE-2017-3731", "VC-OPENSSL-20170126-CVE-2017-3731" @@ -8422,7 +8422,7 @@ "weaknesses": [] }, { - "unique_content_id": "b14fc26f1382b65b58128617820053c3", + "unique_content_id": "d525e54aa33322501d8c100f7be2df5900113d09c8409a50ce37b77478001f13", "aliases": [ "CVE-2017-3732", 
"VC-OPENSSL-20170126-CVE-2017-3732" @@ -8490,7 +8490,7 @@ "weaknesses": [] }, { - "unique_content_id": "e5c015c5ea09f74ca8830fc675109209", + "unique_content_id": "70561a8c52747e3882749934d40dc3c52e1a6fccd239925f1ff317779b30257e", "aliases": [ "CVE-2017-3733", "VC-OPENSSL-20170216-CVE-2017-3733" @@ -8540,7 +8540,7 @@ "weaknesses": [] }, { - "unique_content_id": "3e3d332a535202d4a355d9c6f46f8511", + "unique_content_id": "6e51a8310007cae6d2dd2da43402f0ce33e9bc503675618ae3ed5e22435384c0", "aliases": [ "CVE-2017-3735", "VC-OPENSSL-20170828-CVE-2017-3735" @@ -8608,7 +8608,7 @@ "weaknesses": [] }, { - "unique_content_id": "135805c0fbb3f388567abe5a782e3678", + "unique_content_id": "723a84486e608c93ef84d012c9a3bdbec50fc03f94b6af7f2e3c6db35c4870db", "aliases": [ "CVE-2017-3736", "VC-OPENSSL-20171102-CVE-2017-3736" @@ -8676,7 +8676,7 @@ "weaknesses": [] }, { - "unique_content_id": "37c832cd6a7a445e21de6bcaae2e6aad", + "unique_content_id": "99b0a08fcb1d6012836e07da86ee39aec6568240922f58640fcb7c9b8f561492", "aliases": [ "CVE-2017-3737", "VC-OPENSSL-20171207-CVE-2017-3737" @@ -8726,7 +8726,7 @@ "weaknesses": [] }, { - "unique_content_id": "fe526b02e32f024f79ab16ad59c5cd59", + "unique_content_id": "9855d6d4847a8dac0b2ec4a4a8556a921f9a32c035e43bb98f4201ab12df0d4c", "aliases": [ "CVE-2017-3738", "VC-OPENSSL-20171207-CVE-2017-3738" @@ -8794,7 +8794,7 @@ "weaknesses": [] }, { - "unique_content_id": "891a444705c4d9e9d6d9514e6152b93d", + "unique_content_id": "52b60416f56fbd4cf154ad29a878e1a745b607dcee1653acb5985fa68607508b", "aliases": [ "CVE-2018-0732", "VC-OPENSSL-20180612-CVE-2018-0732" @@ -8862,7 +8862,7 @@ "weaknesses": [] }, { - "unique_content_id": "0add28e4bf2017a49afa086624548363", + "unique_content_id": "f62f0a22bd4695353076d3dc1b2e7670ed0bd9607d774a7cc31c86086cacb015", "aliases": [ "CVE-2018-0733", "VC-OPENSSL-20180327-CVE-2018-0733" @@ -8912,7 +8912,7 @@ "weaknesses": [] }, { - "unique_content_id": "c6585613e6f674c7ea39eefc5057e85d", + "unique_content_id": 
"d2c4e2cf5d78c3a480feea4e1721e0acbb60155c70d8b6a30a282b546f09afcf", "aliases": [ "CVE-2018-0734", "VC-OPENSSL-20181030-CVE-2018-0734" @@ -8998,7 +8998,7 @@ "weaknesses": [] }, { - "unique_content_id": "3193861b88f934ec25c275d622932dc2", + "unique_content_id": "b208d67bce0a078a253edbd6b6f817f83f3c7e0f384dae57fd11f43aa6645a78", "aliases": [ "CVE-2018-0735", "VC-OPENSSL-20181029-CVE-2018-0735" @@ -9066,7 +9066,7 @@ "weaknesses": [] }, { - "unique_content_id": "5ce5c73a388c1721baa86dd346bc5cca", + "unique_content_id": "fdffac35f130aaa543d59035f255119efd80363f868a8aac69b3b5036d4e9052", "aliases": [ "CVE-2018-0737", "VC-OPENSSL-20180416-CVE-2018-0737" @@ -9134,7 +9134,7 @@ "weaknesses": [] }, { - "unique_content_id": "fd56a1d08c404d18a2425bde4a2cc222", + "unique_content_id": "40730ed1276c0a934bcd453d832b1b05ea61d1aeddf8d2a88ed31efc6625e1c9", "aliases": [ "CVE-2018-0739", "VC-OPENSSL-20180327-CVE-2018-0739" @@ -9202,7 +9202,7 @@ "weaknesses": [] }, { - "unique_content_id": "a86eaada3e2c85065180d5d7eb1d3a31", + "unique_content_id": "dd129503db8d87d87f40d36a21b3e7ad7a51515303ca1ddff0a7722bf6b6b809", "aliases": [ "CVE-2018-5407", "VC-OPENSSL-20181102-CVE-2018-5407" @@ -9270,7 +9270,7 @@ "weaknesses": [] }, { - "unique_content_id": "939439dfee2c7c3ef79f3f7fa3e5f90b", + "unique_content_id": "a182ef84f10d8869b39936326cf01831942571fde976d293e8cbb7f9182371de", "aliases": [ "CVE-2019-1543", "VC-OPENSSL-20190306-CVE-2019-1543" @@ -9338,7 +9338,7 @@ "weaknesses": [] }, { - "unique_content_id": "c251f1e3c85429b0daa07cb6ea7d1e67", + "unique_content_id": "1608445a20cee1c7f70bf4d4567f869870a5bda078ae3054db819f7197868284", "aliases": [ "CVE-2019-1547", "VC-OPENSSL-20190910-CVE-2019-1547" @@ -9424,7 +9424,7 @@ "weaknesses": [] }, { - "unique_content_id": "05226413367dc1d93fc68106f47a330c", + "unique_content_id": "efa3c01bca1f8857f755aac0413f6b448077604f02470f2887ccf96682279dba", "aliases": [ "CVE-2019-1549", "VC-OPENSSL-20190910-CVE-2019-1549" @@ -9474,7 +9474,7 @@ "weaknesses": [] }, { - 
"unique_content_id": "70a045decd4328c7ff88c8a1d969e8c4", + "unique_content_id": "1386c9f10ab439a308d3b6c4bfa71d7f17de4bb9b041d065029c422a0d559caf", "aliases": [ "CVE-2019-1551", "VC-OPENSSL-20191206-CVE-2019-1551" @@ -9542,7 +9542,7 @@ "weaknesses": [] }, { - "unique_content_id": "4213f363ba037058475897c693173044", + "unique_content_id": "edd85067182fe9c90b55fc43bfb734f907e3209f959fe776fbca8d96c71accb6", "aliases": [ "CVE-2019-1552", "VC-OPENSSL-20190730-CVE-2019-1552" @@ -9634,7 +9634,7 @@ "weaknesses": [] }, { - "unique_content_id": "bd17aac4dde8bee4fba0c673c8287082", + "unique_content_id": "d2ba9b6bba240765f8121e99e081e43b48475b118a7c16aed6cc5556d5b6be89", "aliases": [ "CVE-2019-1559", "VC-OPENSSL-20190226-CVE-2019-1559" @@ -9684,7 +9684,7 @@ "weaknesses": [] }, { - "unique_content_id": "45ac1a1229fc8b49656c3e6fd99221cd", + "unique_content_id": "07c966215a883c2032c38139472d5ff371ad61b8affa5e951c49f96438ab07cc", "aliases": [ "CVE-2019-1563", "VC-OPENSSL-20190910-CVE-2019-1563" @@ -9770,7 +9770,7 @@ "weaknesses": [] }, { - "unique_content_id": "afb9d94adcf86f7b0de8aa4f7ff7c6b4", + "unique_content_id": "5657b64f70e97033e61583196c24a7a9e4b643cd241052028cb0a2b764adbe7e", "aliases": [ "CVE-2020-1967", "VC-OPENSSL-20200421-CVE-2020-1967" @@ -9820,7 +9820,7 @@ "weaknesses": [] }, { - "unique_content_id": "56010436497977628dcea6e96888d450", + "unique_content_id": "8291dd784cec9b49787a85058d536e4d4c9a136bdc21bcfb7e975a2c41218195", "aliases": [ "CVE-2020-1968", "VC-OPENSSL-20200909-CVE-2020-1968" @@ -9864,7 +9864,7 @@ "weaknesses": [] }, { - "unique_content_id": "87b17158b6ad69a4d8043755547f45ad", + "unique_content_id": "a5da7dab57b99ce22236cb42d5329a816d2abf2481a6857c5b4ce16acb8b940f", "aliases": [ "CVE-2020-1971", "VC-OPENSSL-20201208-CVE-2020-1971" @@ -9932,7 +9932,7 @@ "weaknesses": [] }, { - "unique_content_id": "ebbc5ad78a20128d4894106ef368c8f1", + "unique_content_id": "d94a89c4d33239b6b8b49b765224bdb2cff846ce52cf8d1bfd59e6401fd406d7", "aliases": [ "CVE-2021-23839", 
"VC-OPENSSL-20210216-CVE-2021-23839" @@ -9982,7 +9982,7 @@ "weaknesses": [] }, { - "unique_content_id": "62778ba1713cdf9851ef92f4d2f46fa7", + "unique_content_id": "37127413ec3efbf57f25327ebbe739c46f14fb0992651a32236c3fc60a12e4a4", "aliases": [ "CVE-2021-23840", "VC-OPENSSL-20210216-CVE-2021-23840" @@ -10050,7 +10050,7 @@ "weaknesses": [] }, { - "unique_content_id": "510307f6edf17f0620c4a096bb61df0c", + "unique_content_id": "4f747fd9c1e01f00c514c9af30852970db6eb6c9b83462affe737ebd3b893a0d", "aliases": [ "CVE-2021-23841", "VC-OPENSSL-20210216-CVE-2021-23841" @@ -10118,7 +10118,7 @@ "weaknesses": [] }, { - "unique_content_id": "b9610772604a38aae37934639b563f2d", + "unique_content_id": "7e96ee7be9f83a18c1773a7c46610f55024cfbe0be196a47e2b3ea741ae398e2", "aliases": [ "CVE-2021-3449", "VC-OPENSSL-20210325-CVE-2021-3449" @@ -10168,7 +10168,7 @@ "weaknesses": [] }, { - "unique_content_id": "8017a45e047c6a8a07ddcef5b019a5a9", + "unique_content_id": "943c0441bb44156232628b06433f25a6e1d5c4bef1db447845be8bb595d55320", "aliases": [ "CVE-2021-3450", "VC-OPENSSL-20210325-CVE-2021-3450" @@ -10218,7 +10218,7 @@ "weaknesses": [] }, { - "unique_content_id": "7c59ebbda08fad46ad3628c58c6e1f4f", + "unique_content_id": "b2e254e7e251e702fd77c5eaf069909ab6e7ddf360fc3ff323ee75dc20566220", "aliases": [ "CVE-2021-3711", "VC-OPENSSL-20210824-CVE-2021-3711" @@ -10268,7 +10268,7 @@ "weaknesses": [] }, { - "unique_content_id": "97ca2e1d473bc9e2e802285c56f85541", + "unique_content_id": "ac1bc5a0f0673f7e6556dd40ca607825904051f6f1686650f07ba5727dcab9f9", "aliases": [ "CVE-2021-3712", "VC-OPENSSL-20210824-CVE-2021-3712" @@ -10336,7 +10336,7 @@ "weaknesses": [] }, { - "unique_content_id": "1c5bbe67613cfce3a310b822466ad17e", + "unique_content_id": "2480e0bc015e4765e66637e4b96ea45d8d93d41719e171100ca32011f81d6e80", "aliases": [ "CVE-2021-4044", "VC-OPENSSL-20211214-CVE-2021-4044" @@ -10386,7 +10386,7 @@ "weaknesses": [] }, { - "unique_content_id": "0039548ab133f97e2138bb298ccc7cae", + 
"unique_content_id": "80c2054b079c7d69842fe524fdc6abcf1246a37323a9f29ce4f80f4300e8282f", "aliases": [ "CVE-2021-4160", "VC-OPENSSL-20220128-CVE-2021-4160" @@ -10472,7 +10472,7 @@ "weaknesses": [] }, { - "unique_content_id": "caa5eb3135dc715346ce3a32211b024e", + "unique_content_id": "dc0cbb60dc9280799a925c566b952d1c952cf5c3b30d9e3d5726c30a815e49d2", "aliases": [ "CVE-2022-0778", "VC-OPENSSL-20220315-CVE-2022-0778" diff --git a/vulnerabilities/utils.py b/vulnerabilities/utils.py index b8aeb959d..8c777610d 100644 --- a/vulnerabilities/utils.py +++ b/vulnerabilities/utils.py @@ -10,6 +10,7 @@ import bisect import csv import dataclasses +import hashlib import json import logging import os @@ -572,3 +573,58 @@ def get_purl_version_class(purl): if check_version_class: purl_version_class = check_version_class.version_class return purl_version_class + + +def normalize_text(text): + """Normalize text by removing whitespace and converting to lowercase.""" + return "".join(text.split()).lower() if text else "" + + +def normalize_list(lst): + """Sort a list to ensure consistent ordering.""" + return sorted(lst) if lst else [] + + +def compute_content_id(advisory_data, include_metadata=False): + """ + Compute a unique content_id for an advisory by normalizing its data and hashing it. 
+ + :param advisory_data: An AdvisoryData object + :param include_metadata: Boolean indicating whether to include `created_by` and `url` + :return: SHA-256 hash digest as content_id + """ + + # Normalize fields + from vulnerabilities.importer import AdvisoryData + from vulnerabilities.models import Advisory + + if isinstance(advisory_data, Advisory): + normalized_data = { + "aliases": normalize_list(advisory_data.aliases), + "summary": normalize_text(advisory_data.summary), + "affected_packages": [ + pkg for pkg in normalize_list(advisory_data.affected_packages) if pkg + ], + "references": [ref for ref in normalize_list(advisory_data.references) if ref], + "weaknesses": normalize_list(advisory_data.weaknesses), + } + normalized_data["url"] = advisory_data.url + + elif isinstance(advisory_data, AdvisoryData): + normalized_data = { + "aliases": normalize_list(advisory_data.aliases), + "summary": normalize_text(advisory_data.summary), + "affected_packages": [ + pkg.to_dict() for pkg in normalize_list(advisory_data.affected_packages) if pkg + ], + "references": [ + ref.to_dict() for ref in normalize_list(advisory_data.references) if ref + ], + "weaknesses": normalize_list(advisory_data.weaknesses), + } + normalized_data["url"] = advisory_data.url + + normalized_json = json.dumps(normalized_data, separators=(",", ":"), sort_keys=True) + content_id = hashlib.sha256(normalized_json.encode("utf-8")).hexdigest() + + return content_id From a6e81c1f08c3c41e9953518201762fb47a7d5787 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Thu, 20 Mar 2025 15:12:14 +0530 Subject: [PATCH 064/545] Add captcha for user signup (#1822) Signed-off-by: Tushar Goel --- requirements.txt | 1 + setup.cfg | 2 ++ vulnerabilities/forms.py | 6 ++++++ .../templates/api_user_creation_form.html | 12 +++++++++--- vulnerablecode/settings.py | 7 +++++++ 5 files changed, 25 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt 
index 347259791..b80ec6fb9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -31,6 +31,7 @@ Django==4.2.17 django-crispy-forms==2.3 django-environ==0.11.2 django-filter==24.3 +django-recaptcha==4.0.0 django-widget-tweaks==1.5.0 djangorestframework==3.15.2 doc8==0.11.1 diff --git a/setup.cfg b/setup.cfg index a3db96abd..8c6dc03c0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -99,6 +99,8 @@ install_requires = python-dotenv texttable + django-recaptcha>=4.0.0 + [options.extras_require] dev = diff --git a/vulnerabilities/forms.py b/vulnerabilities/forms.py index a00885637..50511571d 100644 --- a/vulnerabilities/forms.py +++ b/vulnerabilities/forms.py @@ -9,6 +9,8 @@ from django import forms from django.core.validators import validate_email +from django_recaptcha.fields import ReCaptchaField +from django_recaptcha.widgets import ReCaptchaV2Checkbox from vulnerabilities.models import ApiUser @@ -38,6 +40,10 @@ class ApiUserCreationForm(forms.ModelForm): Support a simplified creation for API-only users directly from the UI. """ + captcha = ReCaptchaField( + error_messages={"required": ("Captcha is required")}, widget=ReCaptchaV2Checkbox + ) + class Meta: model = ApiUser fields = ( diff --git a/vulnerabilities/templates/api_user_creation_form.html b/vulnerabilities/templates/api_user_creation_form.html index c7b2291f0..4c596f094 100644 --- a/vulnerabilities/templates/api_user_creation_form.html +++ b/vulnerabilities/templates/api_user_creation_form.html @@ -14,11 +14,17 @@ {% endfor %}
    - {% for field_name, errors in form.errors.items %} + {% if form.errors.captcha %}
    - {{ errors }} + {{ form.errors.captcha }}
    - {% endfor %} + {% else %} +
    + {% for error in form.errors.values %} + {{ error }} + {% endfor %} +
    + {% endif %}

    VulnerableCode API key request diff --git a/vulnerablecode/settings.py b/vulnerablecode/settings.py index 0e545e0f2..a0e1bf1c0 100644 --- a/vulnerablecode/settings.py +++ b/vulnerablecode/settings.py @@ -83,8 +83,15 @@ "drf_spectacular", # required for Django collectstatic discovery "drf_spectacular_sidecar", + "django_recaptcha", ) +RECAPTCHA_PUBLIC_KEY = env.str("RECAPTCHA_PUBLIC_KEY", "") +RECAPTCHA_PRIVATE_KEY = env.str("RECAPTCHA_PRIVATE_KEY", "") +SILENCED_SYSTEM_CHECKS = ["captcha.recaptcha_test_key_error"] +RECAPTCHA_DOMAIN = env.str("RECAPTCHA_DOMAIN", "www.recaptcha.net") + + MIDDLEWARE = ( "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", From 36a930adc6ff3b9048105e260477d0899f613c01 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 15:28:30 +0530 Subject: [PATCH 065/545] Bump jinja2 from 3.1.5 to 3.1.6 (#1824) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.5 to 3.1.6. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.5...3.1.6) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index b80ec6fb9..fad9bf63c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -54,7 +54,7 @@ ipython==8.10.0 isort==5.10.1 itypes==1.2.0 jedi==0.18.1 -Jinja2==3.1.5 +Jinja2==3.1.6 jsonschema==3.2.0 license-expression==30.3.1 lxml==4.9.1 From 52069f47702a5607feff26d33ea40c89e4c27e93 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 15:46:12 +0530 Subject: [PATCH 066/545] Bump cryptography from 43.0.1 to 44.0.1 (#1823) Bumps [cryptography](https://github.com/pyca/cryptography) from 43.0.1 to 44.0.1. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/43.0.1...44.0.1) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Tushar Goel <34160672+TG1999@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index fad9bf63c..b0d7a4547 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,7 +20,7 @@ charset-normalizer==2.0.12 click==8.1.2 coreapi==2.3.3 coreschema==0.0.4 -cryptography==43.0.1 +cryptography==44.0.1 crispy-bootstrap4==2024.1 cwe2==3.0.0 dateparser==1.1.1 From 4f6ffe882ee9351f7fd4692e62b4e6c05398f641 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Thu, 13 Feb 2025 17:13:28 +0530 Subject: [PATCH 067/545] Migrate Advisory aliases field to M2M relationship Signed-off-by: Keshav Priyadarshi --- .../0089_migrate_advisory_aliases.py | 117 ++++++++++++++++++ vulnerabilities/models.py | 9 +- vulnerabilities/tests/test_changelog.py | 9 +- 3 files changed, 128 insertions(+), 7 deletions(-) create mode 100644 vulnerabilities/migrations/0089_migrate_advisory_aliases.py diff --git a/vulnerabilities/migrations/0089_migrate_advisory_aliases.py b/vulnerabilities/migrations/0089_migrate_advisory_aliases.py new file mode 100644 index 000000000..928c6363e --- /dev/null +++ b/vulnerabilities/migrations/0089_migrate_advisory_aliases.py @@ -0,0 +1,117 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
+# + +from aboutcode.pipeline import LoopProgress +from django.db import migrations +from django.db import models + +""" +Model and data migration for converting the Advisory aliases +JSON field to a concrete M2M Advisory Alias relationship. +""" + +def bulk_update(model, items, fields, logger): + item_count = 0 + if items: + try: + model.objects.bulk_update(objs=items, fields=fields) + item_count += len(items) + except Exception as e: + logger(f"Error updating Advisory: {e}") + items.clear() + return item_count + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0088_fix_alpine_purl_type"), + ] + + def populate_new_advisory_aliases_field(apps, schema_editor): + Advisory = apps.get_model("vulnerabilities", "Advisory") + Alias = apps.get_model("vulnerabilities", "Alias") + advisories = Advisory.objects.all() + + chunk_size = 10000 + advisories_count = advisories.count() + print(f"\nPopulate new advisory aliases relationship.") + progress = LoopProgress( + total_iterations=advisories_count, + logger=print, + progress_step=1, + ) + for advisory in progress.iter(advisories.iterator(chunk_size=chunk_size)): + aliases = Alias.objects.filter(alias__in=advisory.old_aliases) + advisory.aliases.set(aliases) + + def reverse_populate_new_advisory_aliases_field(apps, schema_editor): + Advisory = apps.get_model("vulnerabilities", "Advisory") + advisories = Advisory.objects.all() + + updated_advisory_count = 0 + batch_size = 10000 + chunk_size = 10000 + updated_advisory = [] + progress = LoopProgress( + total_iterations=advisories.count(), + logger=print, + progress_step=1, + ) + for advisory in progress.iter(advisories.iterator(chunk_size=chunk_size)): + aliases = advisory.aliases.all() + advisory.old_aliases = [alias.alias for alias in aliases] + updated_advisory.append(advisory) + + if len(updated_advisory) > batch_size: + updated_advisory_count += bulk_update( + model=Advisory, + items=updated_advisory, + fields=["old_aliases"], + 
logger=print, + ) + + updated_advisory_count += bulk_update( + model=Advisory, + items=updated_advisory, + fields=["old_aliases"], + logger=print, + ) + + operations = [ + # Rename aliases field to old_aliases + migrations.AlterModelOptions( + name="advisory", + options={"ordering": ["date_published", "unique_content_id"]}, + ), + migrations.AlterUniqueTogether( + name="advisory", + unique_together={("unique_content_id", "date_published", "url")}, + ), + migrations.RenameField( + model_name="advisory", + old_name="aliases", + new_name="old_aliases", + ), + migrations.AddField( + model_name="advisory", + name="aliases", + field=models.ManyToManyField(related_name="advisories", to="vulnerabilities.alias"), + ), + # Populate the new M2M aliases relation + migrations.RunPython( + code=populate_new_advisory_aliases_field, + reverse_code=reverse_populate_new_advisory_aliases_field, + ), + # Delete old_alias field + migrations.RemoveField( + model_name="advisory", + name="old_aliases", + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 7bfc1ba11..f4b9468d2 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1320,7 +1320,10 @@ class Advisory(models.Model): blank=True, help_text="A 64 character unique identifier for the content of the advisory since we use sha256 as hex", ) - aliases = models.JSONField(blank=True, default=list, help_text="A list of alias strings") + aliases = models.ManyToManyField( + Alias, + related_name="advisories", + ) summary = models.TextField( blank=True, ) @@ -1355,8 +1358,8 @@ class Advisory(models.Model): objects = AdvisoryQuerySet.as_manager() class Meta: - unique_together = ["aliases", "unique_content_id", "date_published", "url"] - ordering = ["aliases", "date_published", "unique_content_id"] + unique_together = ["unique_content_id", "date_published", "url"] + ordering = ["date_published", "unique_content_id"] def save(self, *args, **kwargs): advisory_data = self.to_advisory_data() diff 
--git a/vulnerabilities/tests/test_changelog.py b/vulnerabilities/tests/test_changelog.py index 1d5eedaea..3f762ac5e 100644 --- a/vulnerabilities/tests/test_changelog.py +++ b/vulnerabilities/tests/test_changelog.py @@ -17,6 +17,7 @@ from vulnerabilities import models from vulnerabilities.importer import AffectedPackage from vulnerabilities.pipelines.npm_importer import NpmImporterPipeline +from vulnerabilities.pipes.advisory import get_or_create_aliases @pytest.mark.django_db @@ -37,8 +38,8 @@ def test_package_changelog(): fixed_version=SemverVersion("1.0"), ).to_dict() ], - aliases=["CVE-123"], ) + adv.aliases.add(*get_or_create_aliases(["CVE-123"])) NpmImporterPipeline().import_advisory(advisory=adv) assert models.PackageChangeLog.objects.filter(package=pkg).count() == 1 NpmImporterPipeline().import_advisory(advisory=adv) @@ -65,8 +66,8 @@ def test_package_changelog(): affected_version_range=NpmVersionRange.from_native(">=2.0"), ).to_dict() ], - aliases=["CVE-145"], ) + adv.aliases.add(*get_or_create_aliases(["CVE-123"])) NpmImporterPipeline().import_advisory(advisory=adv) assert models.PackageChangeLog.objects.filter(package=pkg1).count() == 1 NpmImporterPipeline().import_advisory(advisory=adv) @@ -96,8 +97,8 @@ def test_vulnerability_changelog(): fixed_version=SemverVersion("1.0"), ).to_dict() ], - aliases=["CVE-TEST-1234"], ) + adv.aliases.add(*get_or_create_aliases(["CVE-TEST-1234"])) NpmImporterPipeline().import_advisory(advisory=adv) # 1 Changelogs is expected here: # 1 for importing vuln details @@ -129,8 +130,8 @@ def test_vulnerability_changelog_software_version(): fixed_version=SemverVersion("1.0"), ).to_dict() ], - aliases=["CVE-TEST-1234"], ) + adv.aliases.add(*get_or_create_aliases(["CVE-TEST-1234"])) NpmImporterPipeline().import_advisory(advisory=adv) npm_vulnerability_log = models.VulnerabilityChangeLog.objects.first() From e9e0273457a502adb1ca98b7f6cb8b7d7554f1ab Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Sun, 23 Feb 2025 19:41:17 
+0530 Subject: [PATCH 068/545] Make Alias vulnerability field optional Signed-off-by: Keshav Priyadarshi --- .../migrations/0089_migrate_advisory_aliases.py | 16 +++++++++++++++- vulnerabilities/models.py | 6 ++++-- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/vulnerabilities/migrations/0089_migrate_advisory_aliases.py b/vulnerabilities/migrations/0089_migrate_advisory_aliases.py index 928c6363e..a9f88b711 100644 --- a/vulnerabilities/migrations/0089_migrate_advisory_aliases.py +++ b/vulnerabilities/migrations/0089_migrate_advisory_aliases.py @@ -10,6 +10,7 @@ from aboutcode.pipeline import LoopProgress from django.db import migrations from django.db import models +import django.db.models.deletion """ Model and data migration for converting the Advisory aliases @@ -85,6 +86,19 @@ def reverse_populate_new_advisory_aliases_field(apps, schema_editor): ) operations = [ + # Make vulnerability relation optional + migrations.AlterField( + model_name="alias", + name="vulnerability", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="aliases", + to="vulnerabilities.vulnerability", + ), + ), + # Rename aliases field to old_aliases migrations.AlterModelOptions( name="advisory", @@ -109,7 +123,7 @@ def reverse_populate_new_advisory_aliases_field(apps, schema_editor): code=populate_new_advisory_aliases_field, reverse_code=reverse_populate_new_advisory_aliases_field, ), - # Delete old_alias field + # Delete JSON aliases field migrations.RemoveField( model_name="advisory", name="old_aliases", diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index f4b9468d2..18aa6ba14 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1276,8 +1276,10 @@ class Alias(models.Model): vulnerability = models.ForeignKey( Vulnerability, - on_delete=models.CASCADE, related_name="aliases", + on_delete=models.SET_NULL, + null=True, + blank=True, ) objects = AliasQuerySet.as_manager() 
@@ -1372,7 +1374,7 @@ def to_advisory_data(self) -> "AdvisoryData": from vulnerabilities.importer import Reference return AdvisoryData( - aliases=self.aliases, + aliases=[item.alias for item in self.aliases.all()], summary=self.summary, affected_packages=[ AffectedPackage.from_dict(pkg) for pkg in self.affected_packages if pkg From 0ba0593d894f083e5904d7988cac31fad97453dd Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Sun, 23 Feb 2025 20:04:04 +0530 Subject: [PATCH 069/545] Use advisory alias relation in pipelines and importer Signed-off-by: Keshav Priyadarshi --- vulnerabilities/import_runner.py | 40 ++++++++++--------- .../improvers/vulnerability_status.py | 6 +-- .../pipelines/add_cvss31_to_CVEs.py | 11 ++--- vulnerabilities/pipes/advisory.py | 20 +++++++--- 4 files changed, 41 insertions(+), 36 deletions(-) diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index 0dcafda10..7a81e34da 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -15,6 +15,7 @@ from django.core.exceptions import ValidationError from django.db import transaction +from django.db.models.query import QuerySet from vulnerabilities.importer import AdvisoryData from vulnerabilities.importer import Importer @@ -96,12 +97,14 @@ def process_advisories( Insert advisories into the database Return the number of inserted advisories. 
""" + from vulnerabilities.pipes.advisory import get_or_create_aliases + count = 0 advisories = [] for data in advisory_datas: try: + aliases = get_or_create_aliases(aliases=data.aliases) obj, created = Advisory.objects.get_or_create( - aliases=data.aliases, summary=data.summary, affected_packages=[pkg.to_dict() for pkg in data.affected_packages], references=[ref.to_dict() for ref in data.references], @@ -113,6 +116,7 @@ def process_advisories( }, url=data.url, ) + obj.aliases.add(*aliases) if not obj.date_imported: advisories.append(obj) except Exception as e: @@ -148,6 +152,8 @@ def process_inferences(inferences: List[Inference], advisory: Advisory, improver erroneous. Also, the atomic transaction for every advisory and its inferences makes sure that date_imported of advisory is consistent. """ + from vulnerabilities.pipes.advisory import get_or_create_aliases + inferences_processed_count = 0 if not inferences: @@ -157,9 +163,10 @@ def process_inferences(inferences: List[Inference], advisory: Advisory, improver logger.info(f"Improving advisory id: {advisory.id}") for inference in inferences: + aliases = get_or_create_aliases(inference.aliases) vulnerability = get_or_create_vulnerability_and_aliases( vulnerability_id=inference.vulnerability_id, - aliases=inference.aliases, + aliases=aliases, summary=inference.summary, advisory=advisory, ) @@ -265,14 +272,13 @@ def create_valid_vulnerability_reference(url, reference_id=None): def get_or_create_vulnerability_and_aliases( - aliases: List[str], vulnerability_id=None, summary=None, advisory=None + aliases: QuerySet, vulnerability_id=None, summary=None, advisory=None ): """ Get or create vulnerabilitiy and aliases such that all existing and new aliases point to the same vulnerability """ - aliases = set(alias.strip() for alias in aliases if alias and alias.strip()) - new_alias_names, existing_vulns = get_vulns_for_aliases_and_get_new_aliases(aliases) + new_aliases, existing_vulns = 
get_vulns_for_aliases_and_get_new_aliases(aliases) # All aliases must point to the same vulnerability vulnerability = None @@ -310,11 +316,11 @@ def get_or_create_vulnerability_and_aliases( # f"Inconsistent summary for {vulnerability.vulnerability_id}. " # f"Existing: {vulnerability.summary!r}, provided: {summary!r}" # ) - associate_vulnerability_with_aliases(vulnerability=vulnerability, aliases=new_alias_names) + associate_vulnerability_with_aliases(vulnerability=vulnerability, aliases=new_aliases) else: try: vulnerability = create_vulnerability_and_add_aliases( - aliases=new_alias_names, summary=summary + aliases=new_aliases, summary=summary ) importer_name = get_importer_name(advisory) VulnerabilityChangeLog.log_import( @@ -324,24 +330,22 @@ def get_or_create_vulnerability_and_aliases( ) except Exception as e: logger.error( - f"Cannot create vulnerability with summary {summary!r} and {new_alias_names!r} {e!r}.\n{traceback_format_exc()}." + f"Cannot create vulnerability with summary {summary!r} and {new_aliases!r} {e!r}.\n{traceback_format_exc()}." ) return return vulnerability -def get_vulns_for_aliases_and_get_new_aliases(aliases): +def get_vulns_for_aliases_and_get_new_aliases(aliases: QuerySet): """ Return ``new_aliases`` that are not in the database and ``existing_vulns`` that point to the given ``aliases``. 
""" - new_aliases = set(aliases) - existing_vulns = set() - for alias in Alias.objects.filter(alias__in=aliases): - existing_vulns.add(alias.vulnerability) - new_aliases.remove(alias.alias) - return new_aliases, existing_vulns + new_aliases = aliases.filter(vulnerability__isnull=True) + existing_vulns = [alias.vulnerability for alias in aliases.filter(vulnerability__isnull=False)] + + return new_aliases, list(set(existing_vulns)) @transaction.atomic @@ -360,7 +364,5 @@ def create_vulnerability_and_add_aliases(aliases, summary): def associate_vulnerability_with_aliases(aliases, vulnerability): - for alias_name in aliases: - alias = Alias(alias=alias_name, vulnerability=vulnerability) - alias.save() - logger.info(f"New alias for {vulnerability!r}: {alias_name}") + aliases.update(vulnerability=vulnerability) + logger.info(f"New alias for {vulnerability!r}: {aliases}") diff --git a/vulnerabilities/improvers/vulnerability_status.py b/vulnerabilities/improvers/vulnerability_status.py index 214e6dc35..e9661344e 100644 --- a/vulnerabilities/improvers/vulnerability_status.py +++ b/vulnerabilities/improvers/vulnerability_status.py @@ -37,11 +37,7 @@ class VulnerabilityStatusImprover(Improver): @property def interesting_advisories(self) -> QuerySet: - return ( - Advisory.objects.filter(Q(created_by=NVDImporterPipeline.pipeline_id)) - .distinct("aliases") - .paginated() - ) + return Advisory.objects.filter(Q(created_by=NVDImporterPipeline.pipeline_id)).paginated() def get_inferences(self, advisory_data: AdvisoryData) -> Iterable[Inference]: """ diff --git a/vulnerabilities/pipelines/add_cvss31_to_CVEs.py b/vulnerabilities/pipelines/add_cvss31_to_CVEs.py index acda42b52..a9791d29c 100644 --- a/vulnerabilities/pipelines/add_cvss31_to_CVEs.py +++ b/vulnerabilities/pipelines/add_cvss31_to_CVEs.py @@ -12,6 +12,7 @@ from vulnerabilities import severity_systems from vulnerabilities.models import Advisory +from vulnerabilities.models import Alias from vulnerabilities.models import 
VulnerabilitySeverity from vulnerabilities.pipelines import VulnerableCodePipeline @@ -48,7 +49,6 @@ def process_cve_advisory_mapping(self): results = [] for severity in progress.iter(nvd_severities.paginated(per_page=batch_size)): - print(severity.url) cve_pattern = re.compile(r"(CVE-\d{4}-\d{4,7})").search cve_match = cve_pattern(severity.url) if cve_match: @@ -57,12 +57,10 @@ def process_cve_advisory_mapping(self): self.log(f"Could not find CVE ID in URL: {severity.url}") continue - matching_advisories = Advisory.objects.filter( - aliases=[cve_id], - created_by="nvd_importer", - ) + if matching_alias := Alias.objects.get(alias=cve_id): + matching_advisories = matching_alias.advisories.filter(created_by="nvd_importer") - for advisory in matching_advisories: + for advisory in matching_advisories or []: for reference in advisory.references: for sev in reference.get("severities", []): if sev.get("system") == "cvssv3.1": @@ -76,7 +74,6 @@ def process_cve_advisory_mapping(self): ) if results: - print(results) self._process_batch(results) self.log(f"Completed processing CVE to Advisory mappings") diff --git a/vulnerabilities/pipes/advisory.py b/vulnerabilities/pipes/advisory.py index 6637122a3..3b33438ec 100644 --- a/vulnerabilities/pipes/advisory.py +++ b/vulnerabilities/pipes/advisory.py @@ -12,6 +12,7 @@ from datetime import timezone from traceback import format_exc as traceback_format_exc from typing import Callable +from typing import List from django.db import transaction @@ -19,6 +20,7 @@ from vulnerabilities.improver import MAX_CONFIDENCE from vulnerabilities.models import Advisory from vulnerabilities.models import AffectedByPackageRelatedVulnerability +from vulnerabilities.models import Alias from vulnerabilities.models import FixingPackageRelatedVulnerability from vulnerabilities.models import Package from vulnerabilities.models import VulnerabilityReference @@ -27,11 +29,17 @@ from vulnerabilities.models import Weakness +def get_or_create_aliases(aliases: 
List) -> List: + for alias in aliases: + Alias.objects.get_or_create(alias=alias) + return Alias.objects.filter(alias__in=aliases) + + def insert_advisory(advisory: AdvisoryData, pipeline_id: str, logger: Callable = None): - obj = None + advisory_obj = None + aliases = get_or_create_aliases(aliases=advisory.aliases) try: - obj, _ = Advisory.objects.get_or_create( - aliases=advisory.aliases, + advisory_obj, _ = Advisory.objects.get_or_create( summary=advisory.summary, affected_packages=[pkg.to_dict() for pkg in advisory.affected_packages], references=[ref.to_dict() for ref in advisory.references], @@ -43,6 +51,7 @@ def insert_advisory(advisory: AdvisoryData, pipeline_id: str, logger: Callable = "date_collected": datetime.now(timezone.utc), }, ) + advisory_obj.aliases.add(*aliases) except Exception as e: if logger: logger( @@ -50,7 +59,7 @@ def insert_advisory(advisory: AdvisoryData, pipeline_id: str, logger: Callable = level=logging.ERROR, ) - return obj + return advisory_obj @transaction.atomic @@ -82,9 +91,10 @@ def import_advisory( affected_purls.extend(package_affected_purls) fixed_purls.extend(package_fixed_purls) + aliases = get_or_create_aliases(advisory_data.aliases) vulnerability = import_runner.get_or_create_vulnerability_and_aliases( vulnerability_id=None, - aliases=advisory_data.aliases, + aliases=aliases, summary=advisory_data.summary, advisory=advisory, ) From 6cc5bd3ac49a0045af8183e93c08378882bc27a3 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Sun, 23 Feb 2025 20:07:17 +0530 Subject: [PATCH 070/545] Update the test fixture to use the new alias field Signed-off-by: Keshav Priyadarshi --- ...es.py => 0090_migrate_advisory_aliases.py} | 2 +- .../tests/pipelines/test_base_pipeline.py | 11 +- .../pipelines/test_nginx_importer_pipeline.py | 1 - vulnerabilities/tests/pipes/test_advisory.py | 7 +- vulnerabilities/tests/test_add_cvsssv31.py | 18 +- ...security_advisories-importer-expected.json | 1465 +++---- 
...security_advisories-importer-expected.json | 3703 +++++++---------- vulnerabilities/tests/test_example.py | 2 +- vulnerabilities/tests/test_import_runner.py | 5 +- vulnerabilities/tests/test_openssl.py | 1 - .../tests/test_postgres_workaround.py | 5 +- .../test_vulnerability_status_improver.py | 19 +- 12 files changed, 2190 insertions(+), 3049 deletions(-) rename vulnerabilities/migrations/{0089_migrate_advisory_aliases.py => 0090_migrate_advisory_aliases.py} (98%) diff --git a/vulnerabilities/migrations/0089_migrate_advisory_aliases.py b/vulnerabilities/migrations/0090_migrate_advisory_aliases.py similarity index 98% rename from vulnerabilities/migrations/0089_migrate_advisory_aliases.py rename to vulnerabilities/migrations/0090_migrate_advisory_aliases.py index a9f88b711..c0e03295a 100644 --- a/vulnerabilities/migrations/0089_migrate_advisory_aliases.py +++ b/vulnerabilities/migrations/0090_migrate_advisory_aliases.py @@ -32,7 +32,7 @@ def bulk_update(model, items, fields, logger): class Migration(migrations.Migration): dependencies = [ - ("vulnerabilities", "0088_fix_alpine_purl_type"), + ("vulnerabilities", "0089_alter_advisory_unique_content_id"), ] def populate_new_advisory_aliases_field(apps, schema_editor): diff --git a/vulnerabilities/tests/pipelines/test_base_pipeline.py b/vulnerabilities/tests/pipelines/test_base_pipeline.py index 02e8c6b09..7d16315ad 100644 --- a/vulnerabilities/tests/pipelines/test_base_pipeline.py +++ b/vulnerabilities/tests/pipelines/test_base_pipeline.py @@ -20,6 +20,7 @@ from vulnerabilities.importer import Reference from vulnerabilities.pipelines import VulnerableCodeBaseImporterPipeline from vulnerabilities.pipelines import VulnerableCodePipeline +from vulnerabilities.pipes.advisory import get_or_create_aliases from vulnerabilities.tests.pipelines import TestLogger advisory_data1 = AdvisoryData( @@ -38,8 +39,7 @@ def get_advisory1(created_by="test_pipeline"): - return models.Advisory.objects.create( - 
aliases=advisory_data1.aliases, + adv = models.Advisory.objects.create( summary=advisory_data1.summary, affected_packages=[pkg.to_dict() for pkg in advisory_data1.affected_packages], references=[ref.to_dict() for ref in advisory_data1.references], @@ -47,6 +47,8 @@ def get_advisory1(created_by="test_pipeline"): created_by=created_by, date_collected=timezone.now(), ) + adv.aliases.add(*get_or_create_aliases(advisory_data1.aliases)) + return adv class TestVulnerableCodePipeline(TestCase): @@ -101,7 +103,7 @@ def test_collect_and_store_advisories(self, mock_advisories_count, mock_collect_ self.assertEqual(1, models.Advisory.objects.count()) collected_advisory = models.Advisory.objects.first() - result_aliases = collected_advisory.aliases + result_aliases = [item.alias for item in collected_advisory.aliases.all()] expected_aliases = advisory_data1.aliases self.assertEqual(expected_aliases, result_aliases) @@ -122,4 +124,5 @@ def test_import_new_advisories(self): self.assertEqual(1, imported_vulnerability.aliases.count()) expected_alias = imported_vulnerability.aliases.first() - self.assertEqual(advisory1.aliases[0], expected_alias.alias) + result_alias = advisory1.aliases.first() + self.assertEqual(result_alias.alias, expected_alias.alias) diff --git a/vulnerabilities/tests/pipelines/test_nginx_importer_pipeline.py b/vulnerabilities/tests/pipelines/test_nginx_importer_pipeline.py index c4bce99a6..80f3705bf 100644 --- a/vulnerabilities/tests/pipelines/test_nginx_importer_pipeline.py +++ b/vulnerabilities/tests/pipelines/test_nginx_importer_pipeline.py @@ -30,7 +30,6 @@ ADVISORY_FIELDS_TO_TEST = ( "unique_content_id", - "aliases", "summary", "affected_packages", "references", diff --git a/vulnerabilities/tests/pipes/test_advisory.py b/vulnerabilities/tests/pipes/test_advisory.py index a371ca551..6135fbf59 100644 --- a/vulnerabilities/tests/pipes/test_advisory.py +++ b/vulnerabilities/tests/pipes/test_advisory.py @@ -16,10 +16,10 @@ from vulnerabilities.importer import 
AdvisoryData from vulnerabilities.importer import AffectedPackage from vulnerabilities.importer import Reference +from vulnerabilities.pipes.advisory import get_or_create_aliases from vulnerabilities.pipes.advisory import import_advisory advisory_data1 = AdvisoryData( - aliases=["CVE-2020-13371337"], summary="vulnerability description here", affected_packages=[ AffectedPackage( @@ -34,8 +34,7 @@ def get_advisory1(created_by="test_pipeline"): - return models.Advisory.objects.create( - aliases=advisory_data1.aliases, + advisory = models.Advisory.objects.create( summary=advisory_data1.summary, affected_packages=[pkg.to_dict() for pkg in advisory_data1.affected_packages], references=[ref.to_dict() for ref in advisory_data1.references], @@ -43,6 +42,8 @@ def get_advisory1(created_by="test_pipeline"): created_by=created_by, date_collected=timezone.now(), ) + advisory.aliases.add(*get_or_create_aliases(advisory_data1.aliases)) + return advisory def get_all_vulnerability_relationships_objects(): diff --git a/vulnerabilities/tests/test_add_cvsssv31.py b/vulnerabilities/tests/test_add_cvsssv31.py index e20d1158a..96c2abf86 100644 --- a/vulnerabilities/tests/test_add_cvsssv31.py +++ b/vulnerabilities/tests/test_add_cvsssv31.py @@ -1,14 +1,20 @@ -import unittest -from unittest.mock import Mock -from unittest.mock import patch +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
+# + from django.test import TestCase from vulnerabilities.models import Advisory -from vulnerabilities.models import Alias from vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilitySeverity from vulnerabilities.pipelines.add_cvss31_to_CVEs import CVEAdvisoryMappingPipeline +from vulnerabilities.pipes.advisory import get_or_create_aliases from vulnerabilities.severity_systems import CVSSV3 from vulnerabilities.severity_systems import CVSSV31 @@ -16,9 +22,8 @@ class TestCVEAdvisoryMappingPipeline(TestCase): def setUp(self): self.pipeline = CVEAdvisoryMappingPipeline() - Advisory.objects.create( + advisory = Advisory.objects.create( created_by="nvd_importer", - aliases=["CVE-2024-1234"], references=[ { "severities": [ @@ -35,6 +40,7 @@ def setUp(self): ], date_collected="2024-09-27T19:38:00Z", ) + advisory.aliases.add(*get_or_create_aliases(["CVE-2024-1234"])) vuln = Vulnerability.objects.create(vulnerability_id="CVE-2024-1234") sev = VulnerabilitySeverity.objects.create( scoring_system=CVSSV3.identifier, diff --git a/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json b/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json index 4a2b97556..047cb209e 100644 --- a/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json +++ b/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json @@ -1,10 +1,7 @@ [ { - "unique_content_id": "8f54462a45ac49635f660b6fb755d5e05cdbc34ebaa565e38ca20c522579ce7f", - "aliases": [ - "CORE-2010-0121" - ], - "summary": "Vulnerabilities with Windows 8.3 filename pseudonyms", + "unique_content_id": "04f5bc12ff49a95a29c459222379abe4", + "summary": "NULL pointer dereference while writing client request body", "affected_packages": [ { "package": { @@ -13,10 +10,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.8.33", - 
"affected_version_range": "vers:nginx/>=0.7.52|<=0.8.32" + "fixed_version": "1.11.1", + "affected_version_range": "vers:nginx/>=1.3.9|<=1.11.0" }, { "package": { @@ -25,87 +22,51 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.7.65", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.32" + "fixed_version": "1.10.1", + "affected_version_range": "vers:nginx/>=1.3.9|<=1.11.0" } ], - "references": [], - "date_published": null, - "weaknesses": [] - }, - { - "unique_content_id": "fcb0ba0ce66c1f1cf3b4213fd6e9108ab9965d633582d3e9c070a792e02d9876", - "aliases": [ - "CVE-2009-3896" - ], - "summary": "Null pointer dereference vulnerability", - "affected_packages": [ + "references": [ { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.8.14", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000179.html", + "severities": [ + { + "value": "medium", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" }, { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.7.62", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-4450", + "severities": [], + "reference_id": "CVE-2016-4450", + "reference_type": "" }, { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.6.39", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" + "url": "https://nginx.org/download/patch.2016.write.txt", + "severities": [], + "reference_id": "", + "reference_type": "" }, { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - 
"version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.5.38", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3896", + "url": "https://nginx.org/download/patch.2016.write.txt.asc", "severities": [], - "reference_id": "CVE-2009-3896", + "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.null.pointer.txt", + "url": "https://nginx.org/download/patch.2016.write2.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.null.pointer.txt.asc", + "url": "https://nginx.org/download/patch.2016.write2.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -115,11 +76,8 @@ "weaknesses": [] }, { - "unique_content_id": "e9adfcf58bd2f302fd81436744937e8ea8bae7e1d7133d54cc4097bb94e68656", - "aliases": [ - "CVE-2009-3898" - ], - "summary": "Directory traversal vulnerability", + "unique_content_id": "0f21f4e3d88f4af06f0c46d096e90320", + "summary": "Request line parsing vulnerability", "affected_packages": [ { "package": { @@ -130,8 +88,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.8.17", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.16" + "fixed_version": "1.5.7", + "affected_version_range": "vers:nginx/>=0.8.41|<=1.5.6" }, { "package": { @@ -142,15 +100,39 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.7.63", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.16" + "fixed_version": "1.4.4", + "affected_version_range": "vers:nginx/>=0.8.41|<=1.5.6" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3898", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000125.html", + "severities": [ + { + "value": "medium", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + }, + { + "url": 
"https://nvd.nist.gov/vuln/detail/CVE-2013-4547", "severities": [], - "reference_id": "CVE-2009-3898", + "reference_id": "CVE-2013-4547", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.space.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.space.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -158,11 +140,8 @@ "weaknesses": [] }, { - "unique_content_id": "1000911200f3a7046464251c86a45451e6d049b88cb3e5edc6d009a1867418f7", - "aliases": [ - "CVE-2009-4487" - ], - "summary": "An error log data are not sanitized", + "unique_content_id": "13592aaee15657bff9afca8c98edf8bf", + "summary": "Memory disclosure with specially crafted HTTP backend responses", "affected_packages": [ { "package": { @@ -173,28 +152,9 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": null, - "affected_version_range": "vers:nginx/*" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-4487", - "severities": [], - "reference_id": "CVE-2009-4487", - "reference_type": "" - } - ], - "date_published": null, - "weaknesses": [] - }, - { - "unique_content_id": "92ce767b8cea36271d33c119cb6f706f64f5aba7335cca6791eca90a87f48de1", - "aliases": [ - "CVE-2010-2263" - ], - "summary": "Vulnerabilities with Windows file default stream", - "affected_packages": [ + "fixed_version": "1.5.0", + "affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" + }, { "package": { "name": "nginx", @@ -202,10 +162,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.8.40", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.39" + "fixed_version": "1.4.1", + "affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" }, { "package": { @@ -214,17 +174,53 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - 
"fixed_version": "0.7.66", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.39" + "fixed_version": "1.2.9", + "affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-2263", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000114.html", + "severities": [ + { + "value": "medium", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-2070", "severities": [], - "reference_id": "CVE-2010-2263", + "reference_id": "CVE-2013-2070", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.chunked.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.chunked.txt.asc", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.proxy.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.proxy.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -232,11 +228,8 @@ "weaknesses": [] }, { - "unique_content_id": "9a3699853c72ab1e08f226c4f09f669b6e8b6f0431fa4e78549cd87d8466e0f7", - "aliases": [ - "CVE-2010-2266" - ], - "summary": "Vulnerabilities with invalid UTF-8 sequence on Windows", + "unique_content_id": "2537fa6a9e8e84a3c06bb122fcbf468d", + "summary": "Excessive memory usage in HTTP/2 with zero length headers", "affected_packages": [ { "package": { @@ -245,10 +238,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.8.41", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.40" + "fixed_version": "1.17.3", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" }, { "package": { @@ -257,17 +250,29 @@ "subpath": "", 
"version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.7.67", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.40" + "fixed_version": "1.16.1", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-2266", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", + "severities": [ + { + "value": "low", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9516", "severities": [], - "reference_id": "CVE-2010-2266", + "reference_id": "CVE-2019-9516", "reference_type": "" } ], @@ -275,11 +280,8 @@ "weaknesses": [] }, { - "unique_content_id": "79d90dc8b83d6267a92f31d11be14dc27e619f6edaa996935bf4d0d33b70e575", - "aliases": [ - "CVE-2011-4315" - ], - "summary": "Buffer overflow in resolver", + "unique_content_id": "27612bc7cab82114b1549552f5ad48ff", + "summary": "1-byte memory overwrite in resolver", "affected_packages": [ { "package": { @@ -290,8 +292,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.8", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.1.7" + "fixed_version": "1.21.0", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.20.0" }, { "package": { @@ -302,27 +304,48 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.10", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.1.7" + "fixed_version": "1.20.1", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.20.0" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4315", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2021/000300.html", + "severities": [ + { + "value": "medium", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23017", "severities": 
[], - "reference_id": "CVE-2011-4315", + "reference_id": "CVE-2021-23017", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2021.resolver.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2021.resolver.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], "date_published": null, "weaknesses": [] }, - { - "unique_content_id": "044f1ec3ed59bdbafada7e40b37f7a3cbd0afc31c67aac002251f7ed56e756db", - "aliases": [ - "CVE-2011-4963" - ], - "summary": "Vulnerabilities with Windows directory aliases", + { + "unique_content_id": "2ec9de991e2cb7a5a0ba79bed8556a41", + "summary": "Use-after-free during CNAME response processing in resolver", "affected_packages": [ { "package": { @@ -331,10 +354,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "1.3.1", - "affected_version_range": "vers:nginx/>=0.7.52|<=1.3.0" + "fixed_version": "1.9.10", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" }, { "package": { @@ -343,15 +366,15 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "1.2.1", - "affected_version_range": "vers:nginx/>=0.7.52|<=1.3.0" + "fixed_version": "1.8.1", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2012/000086.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000169.html", "severities": [ { "value": "medium", @@ -363,9 +386,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4963", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0746", "severities": [], - "reference_id": "CVE-2011-4963", + "reference_id": "CVE-2016-0746", "reference_type": "" } ], @@ -373,11 +396,8 @@ "weaknesses": [] }, { - "unique_content_id": 
"9bb829ca8d94430d97ea8bb4d67cddb9f41140a7550e5dced08918f35f1dc5f1", - "aliases": [ - "CVE-2012-1180" - ], - "summary": "Memory disclosure with specially crafted backend responses", + "unique_content_id": "31675b37fe392d1e36b77f7198b1d008", + "summary": "An error log data are not sanitized", "affected_packages": [ { "package": { @@ -388,45 +408,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.17", - "affected_version_range": "vers:nginx/>=0.1.0|<=1.1.16" - }, - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.14", - "affected_version_range": "vers:nginx/>=0.1.0|<=1.1.16" + "fixed_version": null, + "affected_version_range": "vers:nginx/*" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2012/000076.html", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-1180", - "severities": [], - "reference_id": "CVE-2012-1180", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2012.memory.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2012.memory.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-4487", "severities": [], - "reference_id": "", + "reference_id": "CVE-2009-4487", "reference_type": "" } ], @@ -434,11 +424,8 @@ "weaknesses": [] }, { - "unique_content_id": "9d373a60d30d98c6a84d134e0f1c1880b4e82b795a9175c51b172c9d988633c4", - "aliases": [ - "CVE-2012-2089" - ], - "summary": "Buffer overflow in the ngx_http_mp4_module", + "unique_content_id": "33d08a513ea5fef861e924f2601f7ac6", + "summary": "Memory disclosure in the ngx_http_mp4_module", "affected_packages": [ { "package": { @@ -449,8 +436,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.19", - "affected_version_range": 
"vers:nginx/>=1.0.7|<=1.0.14|>=1.1.3|<=1.1.18" + "fixed_version": "1.15.6", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.15.5" }, { "package": { @@ -461,31 +448,37 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.15", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.14|>=1.1.3|<=1.1.18" + "fixed_version": "1.14.1", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.15.5" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2012/000080.html", - "severities": [], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000221.html", + "severities": [ + { + "value": "medium", + "system": "generic_textual", + "scoring_elements": "" + } + ], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-2089", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16845", "severities": [], - "reference_id": "CVE-2012-2089", + "reference_id": "CVE-2018-16845", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2012.mp4.txt", + "url": "https://nginx.org/download/patch.2018.mp4.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2012.mp4.txt.asc", + "url": "https://nginx.org/download/patch.2018.mp4.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -495,11 +488,8 @@ "weaknesses": [] }, { - "unique_content_id": "6dfd4b51bcdf1ee31bfdd97ee6370422b70533c1db972de69cdc2e281a4bb90a", - "aliases": [ - "CVE-2013-2028" - ], - "summary": "Stack-based buffer overflow with specially crafted request", + "unique_content_id": "3430956de63de2b1188c3d1e50c3b0cd", + "summary": "SPDY memory corruption", "affected_packages": [ { "package": { @@ -510,43 +500,31 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.5.0", - "affected_version_range": "vers:nginx/>=1.3.9|<=1.4.0" - }, - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - 
"version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.4.1", - "affected_version_range": "vers:nginx/>=1.3.9|<=1.4.0" + "fixed_version": "1.5.11", + "affected_version_range": "vers:nginx/1.5.10" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000112.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000132.html", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-2028", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0088", "severities": [], - "reference_id": "CVE-2013-2028", + "reference_id": "CVE-2014-0088", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2013.chunked.txt", + "url": "https://nginx.org/download/patch.2014.spdy.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2013.chunked.txt.asc", + "url": "https://nginx.org/download/patch.2014.spdy.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -556,11 +534,8 @@ "weaknesses": [] }, { - "unique_content_id": "4590b8b17cfdf0314dffd75372ba416fd8ced35cdeb673aabe9d2ed5b19dab3d", - "aliases": [ - "CVE-2013-2070" - ], - "summary": "Memory disclosure with specially crafted HTTP backend responses", + "unique_content_id": "43c2f41bb851164d3495f3c204a57f20", + "summary": "Memory disclosure in HTTP/3", "affected_packages": [ { "package": { @@ -571,20 +546,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.5.0", - "affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" - }, - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.4.1", - "affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" + "fixed_version": "1.27.0", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" }, { "package": { @@ -595,13 +558,13 @@ 
"namespace": "", "qualifiers": "" }, - "fixed_version": "1.2.9", - "affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" + "fixed_version": "1.26.1", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000114.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", "severities": [ { "value": "medium", @@ -613,31 +576,59 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-2070", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-34161", "severities": [], - "reference_id": "CVE-2013-2070", + "reference_id": "CVE-2024-34161", "reference_type": "" - }, + } + ], + "date_published": null, + "weaknesses": [] + }, + { + "unique_content_id": "686399b9012be40d39b5366ec1695768", + "summary": "The renegotiation vulnerability in SSL protocol", + "affected_packages": [ { - "url": "https://nginx.org/download/patch.2013.chunked.txt", - "severities": [], - "reference_id": "", - "reference_type": "" + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.8.23", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.22" }, { - "url": "https://nginx.org/download/patch.2013.chunked.txt.asc", + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.7.64", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.22" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3555", "severities": [], - "reference_id": "", + "reference_id": "CVE-2009-3555", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2013.proxy.txt", + "url": "https://nginx.org/download/patch.cve-2009-3555.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": 
"https://nginx.org/download/patch.2013.proxy.txt.asc", + "url": "https://nginx.org/download/patch.cve-2009-3555.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -647,11 +638,8 @@ "weaknesses": [] }, { - "unique_content_id": "b011769b7166e6e3a5b0dabd560be9fec2b4963a0c14c8934b394504041dd801", - "aliases": [ - "CVE-2013-4547" - ], - "summary": "Request line parsing vulnerability", + "unique_content_id": "71c918b8f82b4de8cfa23fc96fa0d7a7", + "summary": "Invalid pointer dereference in resolver", "affected_packages": [ { "package": { @@ -662,8 +650,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.5.7", - "affected_version_range": "vers:nginx/>=0.8.41|<=1.5.6" + "fixed_version": "1.9.10", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" }, { "package": { @@ -674,13 +662,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.4.4", - "affected_version_range": "vers:nginx/>=0.8.41|<=1.5.6" + "fixed_version": "1.8.1", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000125.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000169.html", "severities": [ { "value": "medium", @@ -692,21 +680,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-4547", - "severities": [], - "reference_id": "CVE-2013-4547", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2013.space.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2013.space.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0742", "severities": [], - "reference_id": "", + "reference_id": "CVE-2016-0742", "reference_type": "" } ], @@ -714,11 +690,8 @@ "weaknesses": [] }, { - "unique_content_id": "f9a0149f8d0c6afe588cc7c0a170e45c828219c342b9d7ca12d0e830c68b752a", - "aliases": [ - "CVE-2014-0088" - ], - "summary": "SPDY 
memory corruption", + "unique_content_id": "74ec3c647d544d6e6935492b7dceb572", + "summary": "Excessive CPU usage in HTTP/2 with priority changes", "affected_packages": [ { "package": { @@ -729,33 +702,39 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.5.11", - "affected_version_range": "vers:nginx/1.5.10" + "fixed_version": "1.17.3", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + }, + { + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.16.1", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" } ], "references": [ - { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000132.html", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0088", - "severities": [], - "reference_id": "CVE-2014-0088", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2014.spdy.txt", - "severities": [], + { + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", + "severities": [ + { + "value": "low", + "system": "generic_textual", + "scoring_elements": "" + } + ], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2014.spdy.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9513", "severities": [], - "reference_id": "", + "reference_id": "CVE-2019-9513", "reference_type": "" } ], @@ -763,11 +742,8 @@ "weaknesses": [] }, { - "unique_content_id": "04ec1beb69b3712ef90b5975ff13d5d9ece8dc4c31e2fbd033e1e7be98f889ed", - "aliases": [ - "CVE-2014-0133" - ], - "summary": "SPDY heap buffer overflow", + "unique_content_id": "79d9b38e6e89e3f3fc5ca4b2e64d0faa", + "summary": "Stack overflow and use-after-free in HTTP/3", "affected_packages": [ { "package": { @@ -778,8 +754,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.5.12", - "affected_version_range": 
"vers:nginx/>=1.3.15|<=1.5.11" + "fixed_version": "1.27.0", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" }, { "package": { @@ -790,33 +766,27 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.4.7", - "affected_version_range": "vers:nginx/>=1.3.15|<=1.5.11" + "fixed_version": "1.26.1", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000135.html", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0133", - "severities": [], - "reference_id": "CVE-2014-0133", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2014.spdy2.txt", - "severities": [], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", + "severities": [ + { + "value": "medium", + "system": "generic_textual", + "scoring_elements": "" + } + ], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2014.spdy2.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-31079", "severities": [], - "reference_id": "", + "reference_id": "CVE-2024-31079", "reference_type": "" } ], @@ -824,10 +794,7 @@ "weaknesses": [] }, { - "unique_content_id": "e3af8c6275036d10bb0d3b20807288808bcb24ff1fad37f09757d381f90fc862", - "aliases": [ - "CVE-2014-3556" - ], + "unique_content_id": "83d5fba07f12acd2e4947e68d233fbe5", "summary": "STARTTLS command injection", "affected_packages": [ { @@ -891,11 +858,8 @@ "weaknesses": [] }, { - "unique_content_id": "68957cdbe4f38386944b07c2f3138ad59f02df490dab487d8709f8642a395496", - "aliases": [ - "CVE-2014-3616" - ], - "summary": "SSL session reuse vulnerability", + "unique_content_id": "8ca47577347bd9f2027e09e32bc74866", + "summary": "Excessive CPU usage in HTTP/2 with small window updates", "affected_packages": [ { "package": { @@ -906,8 +870,8 @@ "namespace": "", 
"qualifiers": "" }, - "fixed_version": "1.7.5", - "affected_version_range": "vers:nginx/>=0.5.6|<=1.7.4" + "fixed_version": "1.17.3", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" }, { "package": { @@ -918,13 +882,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.6.2", - "affected_version_range": "vers:nginx/>=0.5.6|<=1.7.4" + "fixed_version": "1.16.1", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000147.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", "severities": [ { "value": "medium", @@ -936,9 +900,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3616", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9511", "severities": [], - "reference_id": "CVE-2014-3616", + "reference_id": "CVE-2019-9511", "reference_type": "" } ], @@ -946,11 +910,8 @@ "weaknesses": [] }, { - "unique_content_id": "cc6ff6eaba227bf65c93964fdf2731b75ff1597638283ae950e3941cd4932632", - "aliases": [ - "CVE-2016-0742" - ], - "summary": "Invalid pointer dereference in resolver", + "unique_content_id": "901e1dc04473ff40c6e503baec5e9bf6", + "summary": "Buffer overflow in the ngx_http_mp4_module", "affected_packages": [ { "package": { @@ -961,8 +922,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.9.10", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" + "fixed_version": "1.1.19", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.14|>=1.1.3|<=1.1.18" }, { "package": { @@ -973,27 +934,33 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.8.1", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" + "fixed_version": "1.0.15", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.14|>=1.1.3|<=1.1.18" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000169.html", - "severities": [ - { - "value": "medium", - "system": 
"generic_textual", - "scoring_elements": "" - } - ], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2012/000080.html", + "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0742", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-2089", "severities": [], - "reference_id": "CVE-2016-0742", + "reference_id": "CVE-2012-2089", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2012.mp4.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2012.mp4.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -1001,11 +968,8 @@ "weaknesses": [] }, { - "unique_content_id": "74d2403b1a2d875ba8411a315d217fd704642a39c3e9392bd2b81cd4e4cca8a8", - "aliases": [ - "CVE-2016-0746" - ], - "summary": "Use-after-free during CNAME response processing in resolver", + "unique_content_id": "91c6638b38a1e6e2ff4997eeefef8cf8", + "summary": "Directory traversal vulnerability", "affected_packages": [ { "package": { @@ -1016,8 +980,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.9.10", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" + "fixed_version": "0.8.17", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.16" }, { "package": { @@ -1028,27 +992,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.8.1", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" + "fixed_version": "0.7.63", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.16" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000169.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0746", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3898", "severities": [], - "reference_id": 
"CVE-2016-0746", + "reference_id": "CVE-2009-3898", "reference_type": "" } ], @@ -1056,10 +1008,7 @@ "weaknesses": [] }, { - "unique_content_id": "3f9a96e88c2c8cb3ad5852621091d686b420e0fa25921a9f10f330e02e7f47d6", - "aliases": [ - "CVE-2016-0747" - ], + "unique_content_id": "925abc90d30273fe8cb404b7f3c8dfd3", "summary": "Insufficient limits of CNAME resolution in resolver", "affected_packages": [ { @@ -1111,11 +1060,8 @@ "weaknesses": [] }, { - "unique_content_id": "3db919e67e7061f392f575e7ac88884850c686c133ebdd4f58dfddb6196e15bf", - "aliases": [ - "CVE-2016-4450" - ], - "summary": "NULL pointer dereference while writing client request body", + "unique_content_id": "96c2ffdeacca4901942abd83d54f33f5", + "summary": "Vulnerabilities with Windows directory aliases", "affected_packages": [ { "package": { @@ -1124,10 +1070,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "1.11.1", - "affected_version_range": "vers:nginx/>=1.3.9|<=1.11.0" + "fixed_version": "1.3.1", + "affected_version_range": "vers:nginx/>=0.7.52|<=1.3.0" }, { "package": { @@ -1136,15 +1082,15 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "1.10.1", - "affected_version_range": "vers:nginx/>=1.3.9|<=1.11.0" + "fixed_version": "1.2.1", + "affected_version_range": "vers:nginx/>=0.7.52|<=1.3.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000179.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2012/000086.html", "severities": [ { "value": "medium", @@ -1156,33 +1102,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-4450", - "severities": [], - "reference_id": "CVE-2016-4450", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2016.write.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": 
"https://nginx.org/download/patch.2016.write.txt.asc", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2016.write2.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2016.write2.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4963", "severities": [], - "reference_id": "", + "reference_id": "CVE-2011-4963", "reference_type": "" } ], @@ -1190,10 +1112,7 @@ "weaknesses": [] }, { - "unique_content_id": "60c648561ee11d1ece306182ff608e5d66aeb748c91c4c91d79aa4f7967f2149", - "aliases": [ - "CVE-2017-7529" - ], + "unique_content_id": "b3192a372fdac00b2cdf462b562cf73b", "summary": "Integer overflow in the range filter", "affected_packages": [ { @@ -1257,11 +1176,8 @@ "weaknesses": [] }, { - "unique_content_id": "e4731a12d4f385fc4d0774714c3e79dc98b8ec9c1c648120e0aa196a0d165066", - "aliases": [ - "CVE-2018-16843" - ], - "summary": "Excessive memory usage in HTTP/2", + "unique_content_id": "b3d7627b206f561242cdd2eae0e3bbeb", + "summary": "Buffer overwrite in HTTP/3", "affected_packages": [ { "package": { @@ -1272,8 +1188,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.15.6", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" + "fixed_version": "1.27.0", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" }, { "package": { @@ -1284,16 +1200,16 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.14.1", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" + "fixed_version": "1.26.1", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000220.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", "severities": [ { - "value": "low", + "value": "medium", "system": "generic_textual", "scoring_elements": "" } @@ -1302,9 
+1218,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16843", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-32760", "severities": [], - "reference_id": "CVE-2018-16843", + "reference_id": "CVE-2024-32760", "reference_type": "" } ], @@ -1312,11 +1228,8 @@ "weaknesses": [] }, { - "unique_content_id": "37a3e3a4d916420d151462c0e761db15f3dfb81ead3e3fa18e84ef4a93151d4c", - "aliases": [ - "CVE-2018-16844" - ], - "summary": "Excessive CPU usage in HTTP/2", + "unique_content_id": "b72c609cd1be7c77f4432e1bc8c365f3", + "summary": "NULL pointer dereference in HTTP/3", "affected_packages": [ { "package": { @@ -1327,8 +1240,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.15.6", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" + "fixed_version": "1.27.0", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" }, { "package": { @@ -1339,16 +1252,16 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.14.1", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" + "fixed_version": "1.26.1", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000220.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", "severities": [ { - "value": "low", + "value": "medium", "system": "generic_textual", "scoring_elements": "" } @@ -1357,9 +1270,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16844", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-35200", "severities": [], - "reference_id": "CVE-2018-16844", + "reference_id": "CVE-2024-35200", "reference_type": "" } ], @@ -1367,11 +1280,8 @@ "weaknesses": [] }, { - "unique_content_id": "ef80f06b34224fbde70a6a359ccf297c0ec2bfae9148973d3689a1c2acb888ad", - "aliases": [ - "CVE-2018-16845" - ], - "summary": "Memory disclosure in the ngx_http_mp4_module", + 
"unique_content_id": "c616b60f7fd802e88ca29fce6222654e", + "summary": "Buffer underflow vulnerability", "affected_packages": [ { "package": { @@ -1382,8 +1292,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.15.6", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.15.5" + "fixed_version": "0.8.15", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" }, { "package": { @@ -1394,37 +1304,49 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.14.1", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.15.5" - } - ], - "references": [ + "fixed_version": "0.7.62", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" + }, { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000221.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.6.39", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16845", + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.5.38", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-2629", "severities": [], - "reference_id": "CVE-2018-16845", + "reference_id": "CVE-2009-2629", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2018.mp4.txt", + "url": "https://nginx.org/download/patch.180065.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2018.mp4.txt.asc", + "url": "https://nginx.org/download/patch.180065.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ 
-1434,11 +1356,8 @@ "weaknesses": [] }, { - "unique_content_id": "7dd1dec4f019ce4e044852324feb9444dbc965f26c98025bc28f50294251c5c0", - "aliases": [ - "CVE-2019-9511" - ], - "summary": "Excessive CPU usage in HTTP/2 with small window updates", + "unique_content_id": "ca72fb146fcd014ee284ef66f7fc1c08", + "summary": "Memory disclosure with specially crafted backend responses", "affected_packages": [ { "package": { @@ -1449,8 +1368,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.17.3", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.1.17", + "affected_version_range": "vers:nginx/>=0.1.0|<=1.1.16" }, { "package": { @@ -1461,27 +1380,33 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.16.1", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.0.14", + "affected_version_range": "vers:nginx/>=0.1.0|<=1.1.16" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2012/000076.html", + "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9511", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-1180", "severities": [], - "reference_id": "CVE-2019-9511", + "reference_id": "CVE-2012-1180", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2012.memory.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2012.memory.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -1489,11 +1414,8 @@ "weaknesses": [] }, { - "unique_content_id": "f52c1d6763864aa721f3c5d6fa201712a04cea0851085e8129014e56ba7b4bbe", - "aliases": [ - "CVE-2019-9513" - ], - "summary": "Excessive CPU usage in HTTP/2 with priority 
changes", + "unique_content_id": "cb70875e6e02b2d41dd8876b4729bf84", + "summary": "Excessive memory usage in HTTP/2", "affected_packages": [ { "package": { @@ -1504,8 +1426,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.17.3", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.15.6", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" }, { "package": { @@ -1516,13 +1438,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.16.1", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.14.1", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000220.html", "severities": [ { "value": "low", @@ -1534,9 +1456,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9513", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16843", "severities": [], - "reference_id": "CVE-2019-9513", + "reference_id": "CVE-2018-16843", "reference_type": "" } ], @@ -1544,11 +1466,8 @@ "weaknesses": [] }, { - "unique_content_id": "fcb04608ea5442dbf70575273074915efc16a95be9d8c84d5f3146f6917b3fb1", - "aliases": [ - "CVE-2019-9516" - ], - "summary": "Excessive memory usage in HTTP/2 with zero length headers", + "unique_content_id": "ce87032bced3f187b1c0fbacc52b8c16", + "summary": "SSL session reuse vulnerability", "affected_packages": [ { "package": { @@ -1559,8 +1478,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.17.3", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.7.5", + "affected_version_range": "vers:nginx/>=0.5.6|<=1.7.4" }, { "package": { @@ -1571,16 +1490,16 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.16.1", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.6.2", + "affected_version_range": 
"vers:nginx/>=0.5.6|<=1.7.4" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000147.html", "severities": [ { - "value": "low", + "value": "medium", "system": "generic_textual", "scoring_elements": "" } @@ -1589,9 +1508,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9516", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3616", "severities": [], - "reference_id": "CVE-2019-9516", + "reference_id": "CVE-2014-3616", "reference_type": "" } ], @@ -1599,11 +1518,8 @@ "weaknesses": [] }, { - "unique_content_id": "b141e948fdfecc52a52fd4111fff37b57216a7f8fd1421df478db15e620a4571", - "aliases": [ - "CVE-2021-23017" - ], - "summary": "1-byte memory overwrite in resolver", + "unique_content_id": "cf47abf58659080601c4cd87a119a769", + "summary": "Excessive CPU usage in HTTP/2", "affected_packages": [ { "package": { @@ -1614,8 +1530,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.21.0", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.20.0" + "fixed_version": "1.15.6", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" }, { "package": { @@ -1626,16 +1542,16 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.20.1", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.20.0" + "fixed_version": "1.14.1", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2021/000300.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000220.html", "severities": [ { - "value": "medium", + "value": "low", "system": "generic_textual", "scoring_elements": "" } @@ -1644,21 +1560,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23017", - "severities": [], - "reference_id": "CVE-2021-23017", - "reference_type": "" - }, - { - "url": 
"https://nginx.org/download/patch.2021.resolver.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2021.resolver.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16844", "severities": [], - "reference_id": "", + "reference_id": "CVE-2018-16844", "reference_type": "" } ], @@ -1666,11 +1570,8 @@ "weaknesses": [] }, { - "unique_content_id": "516f2188bdac91f9372ec3e200c4e754179f61fb8bf3a4613d97ebb569e46831", - "aliases": [ - "CVE-2022-41741" - ], - "summary": "Memory corruption in the ngx_http_mp4_module", + "unique_content_id": "d403898b9315a9ec88d9a401af5352fb", + "summary": "Buffer overflow in resolver", "affected_packages": [ { "package": { @@ -1681,8 +1582,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.23.2", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" + "fixed_version": "1.1.8", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.1.7" }, { "package": { @@ -1693,39 +1594,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.22.1", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" + "fixed_version": "1.0.10", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.1.7" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2022/RBRRON6PYBJJM2XIAPQBFBVLR4Q6IHRA.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-41741", - "severities": [], - "reference_id": "CVE-2022-41741", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2022.mp4.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2022.mp4.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4315", "severities": [], - "reference_id": "", + 
"reference_id": "CVE-2011-4315", "reference_type": "" } ], @@ -1733,11 +1610,8 @@ "weaknesses": [] }, { - "unique_content_id": "743193c823a19a8eea1eeb8bb5ea6c3314ca6350b8d6ba0bcf2ac29d2e99ab11", - "aliases": [ - "CVE-2022-41742" - ], - "summary": "Memory disclosure in the ngx_http_mp4_module", + "unique_content_id": "dab2e1aa4777dbcd579905643982aab1", + "summary": "Null pointer dereference vulnerability", "affected_packages": [ { "package": { @@ -1748,8 +1622,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.23.2", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" + "fixed_version": "0.8.14", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" }, { "package": { @@ -1760,52 +1634,9 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.22.1", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" - } - ], - "references": [ - { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2022/RBRRON6PYBJJM2XIAPQBFBVLR4Q6IHRA.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-41742", - "severities": [], - "reference_id": "CVE-2022-41742", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2022.mp4.txt", - "severities": [], - "reference_id": "", - "reference_type": "" + "fixed_version": "0.7.62", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" }, - { - "url": "https://nginx.org/download/patch.2022.mp4.txt.asc", - "severities": [], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": null, - "weaknesses": [] - }, - { - "unique_content_id": "702a79bf8a92e5ce967d5d540f03d225e05906df0cb641c5538e0e8b8045aa89", - "aliases": [ - "CVE-2024-24989" - ], - "summary": "NULL pointer dereference in HTTP/3", - "affected_packages": [ { "package": { "name": "nginx", @@ -1815,34 +1646,9 @@ 
"namespace": "", "qualifiers": "" }, - "fixed_version": "1.25.4", - "affected_version_range": "vers:nginx/1.25.3" - } - ], - "references": [ - { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/NW6MNW34VZ6HDIHH5YFBIJYZJN7FGNAV.html", - "severities": [], - "reference_id": "", - "reference_type": "" + "fixed_version": "0.6.39", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-24989", - "severities": [], - "reference_id": "CVE-2024-24989", - "reference_type": "" - } - ], - "date_published": null, - "weaknesses": [] - }, - { - "unique_content_id": "71ee7b435e15272f8531b568d58f82e33cfb3881f3ee80b5cae1788183f91827", - "aliases": [ - "CVE-2024-24990" - ], - "summary": "Use-after-free in HTTP/3", - "affected_packages": [ { "package": { "name": "nginx", @@ -1852,21 +1658,27 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.25.4", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.3" + "fixed_version": "0.5.38", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/NW6MNW34VZ6HDIHH5YFBIJYZJN7FGNAV.html", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3896", + "severities": [], + "reference_id": "CVE-2009-3896", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.null.pointer.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-24990", + "url": "https://nginx.org/download/patch.null.pointer.txt.asc", "severities": [], - "reference_id": "CVE-2024-24990", + "reference_id": "", "reference_type": "" } ], @@ -1874,11 +1686,8 @@ "weaknesses": [] }, { - "unique_content_id": "041e081a630681e36df17fc2471cd58a789dce20b54dce62c66900baceb7d771", - "aliases": [ - "CVE-2024-31079" - ], - "summary": "Stack overflow and use-after-free in HTTP/3", + "unique_content_id": "dad2ebc242641f6a276b00769ef57efa", 
+ "summary": "Memory corruption in the ngx_http_mp4_module", "affected_packages": [ { "package": { @@ -1889,8 +1698,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.27.0", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.23.2", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" }, { "package": { @@ -1901,13 +1710,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.26.1", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.22.1", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2022/RBRRON6PYBJJM2XIAPQBFBVLR4Q6IHRA.html", "severities": [ { "value": "medium", @@ -1919,9 +1728,21 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-31079", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-41741", "severities": [], - "reference_id": "CVE-2024-31079", + "reference_id": "CVE-2022-41741", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2022.mp4.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2022.mp4.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -1929,11 +1750,8 @@ "weaknesses": [] }, { - "unique_content_id": "95dab77a3ea69d6d0bac6b48719f4e1d5435af7f1f1a0c1d62aa343bed5e3f32", - "aliases": [ - "CVE-2024-32760" - ], - "summary": "Buffer overwrite in HTTP/3", + "unique_content_id": "db01da77157a7a773285dc98169416ec", + "summary": "SPDY heap buffer overflow", "affected_packages": [ { "package": { @@ -1944,8 +1762,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.27.0", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.5.12", 
+ "affected_version_range": "vers:nginx/>=1.3.15|<=1.5.11" }, { "package": { @@ -1956,27 +1774,33 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.26.1", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.4.7", + "affected_version_range": "vers:nginx/>=1.3.15|<=1.5.11" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000135.html", + "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-32760", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0133", "severities": [], - "reference_id": "CVE-2024-32760", + "reference_id": "CVE-2014-0133", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2014.spdy2.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2014.spdy2.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -1984,11 +1808,41 @@ "weaknesses": [] }, { - "unique_content_id": "b97accb1929bfc3181c61e41c2163f051cac435ea3671b05ebf708ac24c53f15", - "aliases": [ - "CVE-2024-34161" + "unique_content_id": "e06ef4fb12b1b0817736222cc219c5be", + "summary": "Vulnerabilities with Windows 8.3 filename pseudonyms", + "affected_packages": [ + { + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "os=windows" + }, + "fixed_version": "0.8.33", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.32" + }, + { + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "os=windows" + }, + "fixed_version": "0.7.65", + "affected_version_range": 
"vers:nginx/>=0.7.52|<=0.8.32" + } ], - "summary": "Memory disclosure in HTTP/3", + "references": [], + "date_published": null, + "weaknesses": [] + }, + { + "unique_content_id": "e17dde538a78c978602298541bcd29f0", + "summary": "Memory disclosure in the ngx_http_mp4_module", "affected_packages": [ { "package": { @@ -1999,8 +1853,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.27.0", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.23.2", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" }, { "package": { @@ -2011,13 +1865,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.26.1", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.22.1", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2022/RBRRON6PYBJJM2XIAPQBFBVLR4Q6IHRA.html", "severities": [ { "value": "medium", @@ -2029,9 +1883,21 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-34161", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-41742", "severities": [], - "reference_id": "CVE-2024-34161", + "reference_id": "CVE-2022-41742", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2022.mp4.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2022.mp4.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -2039,10 +1905,7 @@ "weaknesses": [] }, { - "unique_content_id": "93ffd507f57f7b01de0bc7cff479daba1c120e28d45b60a14f8fa98bdf597f4a", - "aliases": [ - "CVE-2024-35200" - ], + "unique_content_id": "e4c6a0358264fb7523f6ee40f844854f", "summary": "NULL pointer dereference in HTTP/3", "affected_packages": [ { @@ -2054,39 
+1917,21 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.27.0", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" - }, - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.26.1", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.25.4", + "affected_version_range": "vers:nginx/1.25.3" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/NW6MNW34VZ6HDIHH5YFBIJYZJN7FGNAV.html", + "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-35200", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-24989", "severities": [], - "reference_id": "CVE-2024-35200", + "reference_id": "CVE-2024-24989", "reference_type": "" } ], @@ -2094,12 +1939,8 @@ "weaknesses": [] }, { - "unique_content_id": "fc72f81267258996f729b98893890074ad6155adcc3352d30a04765977836995", - "aliases": [ - "VU#120541", - "CVE-2009-3555" - ], - "summary": "The renegotiation vulnerability in SSL protocol", + "unique_content_id": "e74396e2dc204fb095c802fe54d4d176", + "summary": "Stack-based buffer overflow with specially crafted request", "affected_packages": [ { "package": { @@ -2110,8 +1951,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.8.23", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.22" + "fixed_version": "1.5.0", + "affected_version_range": "vers:nginx/>=1.3.9|<=1.4.0" }, { "package": { @@ -2122,25 +1963,31 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.7.64", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.22" + "fixed_version": "1.4.1", + "affected_version_range": 
"vers:nginx/>=1.3.9|<=1.4.0" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3555", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000112.html", "severities": [], - "reference_id": "CVE-2009-3555", + "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.cve-2009-3555.txt", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-2028", + "severities": [], + "reference_id": "CVE-2013-2028", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.chunked.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.cve-2009-3555.txt.asc", + "url": "https://nginx.org/download/patch.2013.chunked.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -2150,12 +1997,8 @@ "weaknesses": [] }, { - "unique_content_id": "de7a819f87c93c708251b734406d2b9916fce494ab3987be40ca37426b0c2044", - "aliases": [ - "VU#180065", - "CVE-2009-2629" - ], - "summary": "Buffer underflow vulnerability", + "unique_content_id": "eb41c9a738129f7f76c5ff813d190621", + "summary": "Vulnerabilities with invalid UTF-8 sequence on Windows", "affected_packages": [ { "package": { @@ -2164,10 +2007,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "0.8.15", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" + "fixed_version": "0.8.41", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.40" }, { "package": { @@ -2176,11 +2019,27 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "0.7.62", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" - }, + "fixed_version": "0.7.67", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.40" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-2266", + "severities": [], + "reference_id": "CVE-2010-2266", + 
"reference_type": "" + } + ], + "date_published": null, + "weaknesses": [] + }, + { + "unique_content_id": "ef00adb6af6c2a00e81c8ec8de71eed6", + "summary": "Vulnerabilities with Windows file default stream", + "affected_packages": [ { "package": { "name": "nginx", @@ -2188,10 +2047,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "0.6.39", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" + "fixed_version": "0.8.40", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.39" }, { "package": { @@ -2200,29 +2059,51 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "0.5.38", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" + "fixed_version": "0.7.66", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.39" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-2629", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-2263", "severities": [], - "reference_id": "CVE-2009-2629", + "reference_id": "CVE-2010-2263", "reference_type": "" - }, + } + ], + "date_published": null, + "weaknesses": [] + }, + { + "unique_content_id": "f87492771be35866bf4dce017ea54dc8", + "summary": "Use-after-free in HTTP/3", + "affected_packages": [ { - "url": "https://nginx.org/download/patch.180065.txt", + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.25.4", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.3" + } + ], + "references": [ + { + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/NW6MNW34VZ6HDIHH5YFBIJYZJN7FGNAV.html", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.180065.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-24990", "severities": [], - "reference_id": "", + "reference_id": "CVE-2024-24990", 
"reference_type": "" } ], diff --git a/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json b/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json index d844c0c9f..fa6223064 100644 --- a/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json +++ b/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json @@ -1,59 +1,7 @@ [ { - "unique_content_id": "4ee23c143c0a01cd7035e1646adaf2222725ad2c96447ffc524eb79d1ac532dd", - "aliases": [ - "VC-OPENSSL-20141015" - ], - "summary": "OpenSSL has added support for TLS_FALLBACK_SCSV to allow applications to block the ability for a MITM attacker to force a protocol downgrade. Some client applications (such as browsers) will reconnect using a downgraded protocol to work around interoperability bugs in older servers. This could be exploited by an active man-in-the-middle to downgrade connections to SSL 3.0 even if both sides of the connection support higher protocols. SSL 3.0 contains a number of weaknesses including POODLE (CVE-2014-3566). 
See also https://tools.ietf.org/html/draft-ietf-tls-downgrade-scsv-00 and https://www.openssl.org/~bodo/ssl-poodle.pdf", - "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8zc", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0o", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1j", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i" - } - ], - "references": [], - "date_published": "2014-10-15T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "db3632c3ff2c87ef3524c93e91dc8cbeca0778583bcb08c9a8807cbb282d31cb", - "aliases": [ - "CVE-2002-0655", - "VC-OPENSSL-20020730-CVE-2002-0655" - ], - "summary": "Inproper handling of ASCII representations of integers on 64 bit platforms allowed remote attackers to cause a denial of service or possibly execute arbitrary code.", + "unique_content_id": "167751346aa8fefc0a6e3b73ccb1f1a0", + "summary": "A buffer overflow when Kerberos is enabled allowed attackers to execute arbitrary code by sending a long master key. 
Note that this flaw did not affect any released version of 0.9.6 or 0.9.7", "affected_packages": [ { "package": { @@ -64,15 +12,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.6e", - "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d" + "fixed_version": "0.9.7", + "affected_version_range": "vers:openssl/0.9.7-beta3" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0655", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0657", "severities": [], - "reference_id": "CVE-2002-0655", + "reference_id": "CVE-2002-0657", "reference_type": "" }, { @@ -86,12 +34,8 @@ "weaknesses": [] }, { - "unique_content_id": "f4f8760e71f028224b6bdbe5b477b90217df8ca6905036317584b92781c2a119", - "aliases": [ - "CVE-2002-0656", - "VC-OPENSSL-20020730-CVE-2002-0656" - ], - "summary": "A buffer overflow allowed remote attackers to execute arbitrary code by sending a large client master key in SSL2 or a large session ID in SSL3.", + "unique_content_id": "35448b5f7b3fba9f72b91c02f114fb54", + "summary": "Inproper handling of ASCII representations of integers on 64 bit platforms allowed remote attackers to cause a denial of service or possibly execute arbitrary code.", "affected_packages": [ { "package": { @@ -108,9 +52,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0656", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0655", "severities": [], - "reference_id": "CVE-2002-0656", + "reference_id": "CVE-2002-0655", "reference_type": "" }, { @@ -124,12 +68,8 @@ "weaknesses": [] }, { - "unique_content_id": "9bdebb1f707c4c32b8834d1c6d0b55faa70072728c35bc0215df164af8448367", - "aliases": [ - "CVE-2002-0657", - "VC-OPENSSL-20020730-CVE-2002-0657" - ], - "summary": "A buffer overflow when Kerberos is enabled allowed attackers to execute arbitrary code by sending a long master key. 
Note that this flaw did not affect any released version of 0.9.6 or 0.9.7", + "unique_content_id": "829a6d1f23353afa49ace62ba465a58f", + "summary": "A buffer overflow allowed remote attackers to execute arbitrary code by sending a large client master key in SSL2 or a large session ID in SSL3.", "affected_packages": [ { "package": { @@ -140,15 +80,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.7", - "affected_version_range": "vers:openssl/0.9.7-beta3" + "fixed_version": "0.9.6e", + "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0657", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0656", "severities": [], - "reference_id": "CVE-2002-0657", + "reference_id": "CVE-2002-0656", "reference_type": "" }, { @@ -162,11 +102,7 @@ "weaknesses": [] }, { - "unique_content_id": "01616cd468b12076531c0a0453c8766381afac45b3bae651b2535336c25195c6", - "aliases": [ - "CVE-2002-0659", - "VC-OPENSSL-20020730-CVE-2002-0659" - ], + "unique_content_id": "cd2aa8fefe14c523b0f404ea639582db", "summary": "A flaw in the ASN1 library allowed remote attackers to cause a denial of service by sending invalid encodings.", "affected_packages": [ { @@ -200,11 +136,7 @@ "weaknesses": [] }, { - "unique_content_id": "49964979bdbf578d45f122df679ba527fd8fbf64cc2d077728fb1c7f506f4c7f", - "aliases": [ - "CVE-2002-1568", - "VC-OPENSSL-20020808-CVE-2002-1568" - ], + "unique_content_id": "8544420c83cf74faff35e8829adaa340", "summary": "The use of assertions when detecting buffer overflow attacks allowed remote attackers to cause a denial of service (crash) by sending certain messages to cause OpenSSL to abort from a failed assertion, as demonstrated using SSLv2 CLIENT_MASTER_KEY messages, which were not properly handled in s2_srvr.c.", "affected_packages": [ { @@ -238,11 +170,7 @@ "weaknesses": [] }, { - "unique_content_id": 
"9a471da876825cebb089f856300f156b2987e0ffe50686b1646bb2041e7e4c8b", - "aliases": [ - "CVE-2003-0078", - "VC-OPENSSL-20030219-CVE-2003-0078" - ], + "unique_content_id": "61d2edb3343321c505bed6e2c93025b1", "summary": "sl3_get_record in s3_pkt.c did not perform a MAC computation if an incorrect block cipher padding was used, causing an information leak (timing discrepancy) that may make it easier to launch cryptographic attacks that rely on distinguishing between padding and MAC verification errors, possibly leading to extraction of the original plaintext, aka the \"Vaudenay timing attack.\"", "affected_packages": [ { @@ -288,12 +216,8 @@ "weaknesses": [] }, { - "unique_content_id": "ea79326dc573c9da310a5d90e901d9c1c6844afbc7ba492ee6edcf3fc6ed9208", - "aliases": [ - "CVE-2003-0131", - "VC-OPENSSL-20030319-CVE-2003-0131" - ], - "summary": "The SSL and TLS components allowed remote attackers to perform an unauthorized RSA private key operation via a modified Bleichenbacher attack that uses a large number of SSL or TLS connections using PKCS #1 v1.5 padding that caused OpenSSL to leak information regarding the relationship between ciphertext and the associated plaintext, aka the \"Klima-Pokorny-Rosa attack\"", + "unique_content_id": "4fbc2d1aad1223b8ab887ce8d4d07175", + "summary": "RSA blinding was not enabled by default, which could allow local and remote attackers to obtain a server's private key by determining factors using timing differences on (1) the number of extra reductions during Montgomery reduction, and (2) the use of different integer multiplication algorithms (\"Karatsuba\" and normal).", "affected_packages": [ { "package": { @@ -322,28 +246,24 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0131", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0147", "severities": [], - "reference_id": "CVE-2003-0131", + "reference_id": "CVE-2003-0147", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20030319.txt", + 
"url": "https://www.openssl.org/news/secadv/20030317.txt", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2003-03-19T00:00:00+00:00", + "date_published": "2003-03-14T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "29882534d53b1efc839bf130322ad85c220fa6326b24268aeed6af66f2855d02", - "aliases": [ - "CVE-2003-0147", - "VC-OPENSSL-20030314-CVE-2003-0147" - ], - "summary": "RSA blinding was not enabled by default, which could allow local and remote attackers to obtain a server's private key by determining factors using timing differences on (1) the number of extra reductions during Montgomery reduction, and (2) the use of different integer multiplication algorithms (\"Karatsuba\" and normal).", + "unique_content_id": "a0eeb293e46b8d3bbd5029ccaa8585bd", + "summary": "The SSL and TLS components allowed remote attackers to perform an unauthorized RSA private key operation via a modified Bleichenbacher attack that uses a large number of SSL or TLS connections using PKCS #1 v1.5 padding that caused OpenSSL to leak information regarding the relationship between ciphertext and the associated plaintext, aka the \"Klima-Pokorny-Rosa attack\"", "affected_packages": [ { "package": { @@ -372,41 +292,25 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0147", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0131", "severities": [], - "reference_id": "CVE-2003-0147", + "reference_id": "CVE-2003-0131", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20030317.txt", + "url": "https://www.openssl.org/news/secadv/20030319.txt", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2003-03-14T00:00:00+00:00", + "date_published": "2003-03-19T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "2ba1e73cd00bc41e969ea310ec78534f4c6d5124ca0b871dc4ce322a4b34e232", - "aliases": [ - "CVE-2003-0543", - "VC-OPENSSL-20030930-CVE-2003-0543" - ], - 
"summary": "An integer overflow could allow remote attackers to cause a denial of service (crash) via an SSL client certificate with certain ASN.1 tag values.", + "unique_content_id": "23009992dbac485c71608f4cf9811ef2", + "summary": "Certain ASN.1 encodings that were rejected as invalid by the parser could trigger a bug in the deallocation of the corresponding data structure, corrupting the stack, leading to a crash.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.6k", - "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d|0.9.6e|0.9.6f|0.9.6g|0.9.6h|0.9.6i|0.9.6j" - }, { "package": { "name": "openssl", @@ -422,9 +326,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0543", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0545", "severities": [], - "reference_id": "CVE-2003-0543", + "reference_id": "CVE-2003-0545", "reference_type": "" }, { @@ -438,12 +342,8 @@ "weaknesses": [] }, { - "unique_content_id": "038ee7715473ae9e8184e755bbc864397d9e9c4bdc7b878782197d5f445085ac", - "aliases": [ - "CVE-2003-0544", - "VC-OPENSSL-20030930-CVE-2003-0544" - ], - "summary": "Incorrect tracking of the number of characters in certain ASN.1 inputs could allow remote attackers to cause a denial of service (crash) by sending an SSL client certificate that causes OpenSSL to read past the end of a buffer when the long form is used.", + "unique_content_id": "525144b2cfc83c2afb4746cbb043f665", + "summary": "An integer overflow could allow remote attackers to cause a denial of service (crash) via an SSL client certificate with certain ASN.1 tag values.", "affected_packages": [ { "package": { @@ -454,8 +354,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.7c", - "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b" + "fixed_version": "0.9.6k", + "affected_version_range": 
"vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d|0.9.6e|0.9.6f|0.9.6g|0.9.6h|0.9.6i|0.9.6j" }, { "package": { @@ -466,15 +366,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.6k", - "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d|0.9.6e|0.9.6f|0.9.6g|0.9.6h|0.9.6i|0.9.6j" + "fixed_version": "0.9.7c", + "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0544", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0543", "severities": [], - "reference_id": "CVE-2003-0544", + "reference_id": "CVE-2003-0543", "reference_type": "" }, { @@ -488,12 +388,8 @@ "weaknesses": [] }, { - "unique_content_id": "e510e167dfcfce7357fe0616e7ae6ff525c3c2325ea6e0011c06d1300f1d7c10", - "aliases": [ - "CVE-2003-0545", - "VC-OPENSSL-20030930-CVE-2003-0545" - ], - "summary": "Certain ASN.1 encodings that were rejected as invalid by the parser could trigger a bug in the deallocation of the corresponding data structure, corrupting the stack, leading to a crash.", + "unique_content_id": "b20ae6e077855796c5fa2ea663a88269", + "summary": "Incorrect tracking of the number of characters in certain ASN.1 inputs could allow remote attackers to cause a denial of service (crash) by sending an SSL client certificate that causes OpenSSL to read past the end of a buffer when the long form is used.", "affected_packages": [ { "package": { @@ -506,13 +402,25 @@ }, "fixed_version": "0.9.7c", "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.6k", + "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d|0.9.6e|0.9.6f|0.9.6g|0.9.6h|0.9.6i|0.9.6j" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0545", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0544", 
"severities": [], - "reference_id": "CVE-2003-0545", + "reference_id": "CVE-2003-0544", "reference_type": "" }, { @@ -526,11 +434,7 @@ "weaknesses": [] }, { - "unique_content_id": "fb504a9108cb16e440dc0db440f4bae47f2683838b518db42a371fc0453d6a88", - "aliases": [ - "CVE-2003-0851", - "VC-OPENSSL-20031104-CVE-2003-0851" - ], + "unique_content_id": "47507506fbd9633ba7a6429dc0db28b5", "summary": "A flaw in OpenSSL 0.9.6k (only) would cause certain ASN.1 sequences to trigger a large recursion. On platforms such as Windows this large recursion cannot be handled correctly and so the bug causes OpenSSL to crash. A remote attacker could exploit this flaw if they can send arbitrary ASN.1 sequences which would cause OpenSSL to crash. This could be performed for example by sending a client certificate to a SSL/TLS enabled server which is configured to accept them.", "affected_packages": [ { @@ -564,11 +468,7 @@ "weaknesses": [] }, { - "unique_content_id": "a467aec230d90bf340b7325fe9207425c4d35680a470268682407639819c56f6", - "aliases": [ - "CVE-2004-0079", - "VC-OPENSSL-20040317-CVE-2004-0079" - ], + "unique_content_id": "2c802d89f18645aa477b635d3a5242ad", "summary": "The Codenomicon TLS Test Tool uncovered a null-pointer assignment in the do_change_cipher_spec() function. 
A remote attacker could perform a carefully crafted SSL/TLS handshake against a server that used the OpenSSL library in such a way as to cause a crash.", "affected_packages": [ { @@ -614,11 +514,7 @@ "weaknesses": [] }, { - "unique_content_id": "7a9fed2602761c2ae8073bce2e5e1dfa60cb84b83c4fe6e05906bbbaf5e46c7a", - "aliases": [ - "CVE-2004-0081", - "VC-OPENSSL-20040317-CVE-2004-0081" - ], + "unique_content_id": "6f23a0db775050dc33df47c7cc883b11", "summary": "The Codenomicon TLS Test Tool found that some unknown message types were handled incorrectly, allowing a remote attacker to cause a denial of service (infinite loop).", "affected_packages": [ { @@ -652,11 +548,7 @@ "weaknesses": [] }, { - "unique_content_id": "9d9976f31462bb2e67fbf400706c1d2b0299c697e42bf4d3b8dd8e57a37d8e6a", - "aliases": [ - "CVE-2004-0112", - "VC-OPENSSL-20040317-CVE-2004-0112" - ], + "unique_content_id": "cb0e8758b89ae43b1ed34bfb3c0b3b56", "summary": "A flaw in SSL/TLS handshaking code when using Kerberos ciphersuites. A remote attacker could perform a carefully crafted SSL/TLS handshake against a server configured to use Kerberos ciphersuites in such a way as to cause OpenSSL to crash. Most applications have no ability to use Kerberos ciphersuites and will therefore be unaffected.", "affected_packages": [ { @@ -690,11 +582,7 @@ "weaknesses": [] }, { - "unique_content_id": "5b55cf4a1e9c3add130bf345864834163a6924f0165a25458ddf710b31d56b70", - "aliases": [ - "CVE-2004-0975", - "VC-OPENSSL-20040930-CVE-2004-0975" - ], + "unique_content_id": "de61ebaf88fec68edc50b1bbc3c82f15", "summary": "The der_chop script created temporary files insecurely which could allow local users to overwrite files via a symlink attack on temporary files. 
Note that it is quite unlikely that a user would be using the redundant der_chop script, and this script was removed from the OpenSSL distribution.", "affected_packages": [ { @@ -740,11 +628,7 @@ "weaknesses": [] }, { - "unique_content_id": "cba43db55e749a2cd6a8e2b4a8859b0cfb99c57ebb384b08ff64687b69982e0c", - "aliases": [ - "CVE-2005-2969", - "VC-OPENSSL-20051011-CVE-2005-2969" - ], + "unique_content_id": "9cc871a9e62ad5ca419397816ae02f3f", "summary": "A deprecated option, SSL_OP_MISE_SSLV2_RSA_PADDING, could allow an attacker acting as a \"man in the middle\" to force a connection to downgrade to SSL 2.0 even if both parties support better protocols.", "affected_packages": [ { @@ -802,12 +686,8 @@ "weaknesses": [] }, { - "unique_content_id": "9257f845c847e35c7d1aa8587eac8fecc3e42ea36da4a73525adfc9c552d92d6", - "aliases": [ - "CVE-2006-2937", - "VC-OPENSSL-20060928-CVE-2006-2937" - ], - "summary": "During the parsing of certain invalid ASN.1 structures an error condition is mishandled. This can result in an infinite loop which consumes system memory", + "unique_content_id": "509415f8d684ef69f274426ff454ee18", + "summary": "Daniel Bleichenbacher discovered an attack on PKCS #1 v1.5 signatures where under certain circumstances it may be possible for an attacker to forge a PKCS #1 v1.5 signature that would be incorrectly verified by OpenSSL.", "affected_packages": [ { "package": { @@ -818,8 +698,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.7l", - "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b|0.9.7c|0.9.7d|0.9.7e|0.9.7f|0.9.7g|0.9.7h|0.9.7i|0.9.7j|0.9.7k" + "fixed_version": "0.9.7k", + "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b|0.9.7c|0.9.7d|0.9.7e|0.9.7f|0.9.7g|0.9.7h|0.9.7i|0.9.7j" }, { "package": { @@ -830,33 +710,41 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8d", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c" + "fixed_version": "0.9.8c", + "affected_version_range": 
"vers:openssl/0.9.8|0.9.8a|0.9.8b" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": null, + "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d|0.9.6e|0.9.6f|0.9.6g|0.9.6h|0.9.6i|0.9.6j|0.9.6k|0.9.6l|0.9.6m" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-2937", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-4339", "severities": [], - "reference_id": "CVE-2006-2937", + "reference_id": "CVE-2006-4339", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20060928.txt", + "url": "https://www.openssl.org/news/secadv/20060905.txt", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2006-09-28T00:00:00+00:00", + "date_published": "2006-09-05T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "1012d0129bc2bf8d506f3a5abe83570b93979b82add79f0167a08320e397d181", - "aliases": [ - "CVE-2006-2940", - "VC-OPENSSL-20060928-CVE-2006-2940" - ], + "unique_content_id": "1ed97c8f77a2948144952bbf2df0d15f", "summary": "Certain types of public key can take disproportionate amounts of time to process. This could be used by an attacker in a denial of service attack.", "affected_packages": [ { @@ -914,11 +802,7 @@ "weaknesses": [] }, { - "unique_content_id": "8280b343c51657b22636bc717abb349ca3c44f0c053bc1e4a5f0b36440229d47", - "aliases": [ - "CVE-2006-3738", - "VC-OPENSSL-20060928-CVE-2006-3738" - ], + "unique_content_id": "275102d3f86e163b329b3bd7e4032658", "summary": "A buffer overflow was discovered in the SSL_get_shared_ciphers() utility function. 
An attacker could send a list of ciphers to an application that uses this function and overrun a buffer.", "affected_packages": [ { @@ -976,12 +860,8 @@ "weaknesses": [] }, { - "unique_content_id": "d40f47b16b42d15836f11963090ae9bd8ee81396815649437c05a3763f5c0028", - "aliases": [ - "CVE-2006-4339", - "VC-OPENSSL-20060905-CVE-2006-4339" - ], - "summary": "Daniel Bleichenbacher discovered an attack on PKCS #1 v1.5 signatures where under certain circumstances it may be possible for an attacker to forge a PKCS #1 v1.5 signature that would be incorrectly verified by OpenSSL.", + "unique_content_id": "65804b3824faa47750e76089a0851d29", + "summary": "A flaw in the SSLv2 client code was discovered. When a client application used OpenSSL to create an SSLv2 connection to a malicious server, that server could cause the client to crash.", "affected_packages": [ { "package": { @@ -992,8 +872,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.7k", - "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b|0.9.7c|0.9.7d|0.9.7e|0.9.7f|0.9.7g|0.9.7h|0.9.7i|0.9.7j" + "fixed_version": "0.9.7l", + "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b|0.9.7c|0.9.7d|0.9.7e|0.9.7f|0.9.7g|0.9.7h|0.9.7i|0.9.7j|0.9.7k" }, { "package": { @@ -1004,8 +884,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8c", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b" + "fixed_version": "0.9.8d", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c" }, { "package": { @@ -1022,28 +902,24 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-4339", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-4343", "severities": [], - "reference_id": "CVE-2006-4339", + "reference_id": "CVE-2006-4343", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20060905.txt", + "url": "https://www.openssl.org/news/secadv/20060928.txt", "severities": [], "reference_id": "", "reference_type": "" } ], - 
"date_published": "2006-09-05T00:00:00+00:00", + "date_published": "2006-09-28T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "6ce834bf29c1216739243c40e4e7e13563b6e7ee37195b59489542cdae28c644", - "aliases": [ - "CVE-2006-4343", - "VC-OPENSSL-20060928-CVE-2006-4343" - ], - "summary": "A flaw in the SSLv2 client code was discovered. When a client application used OpenSSL to create an SSLv2 connection to a malicious server, that server could cause the client to crash.", + "unique_content_id": "95ecb527c6494eb3dc0e22337c257b02", + "summary": "During the parsing of certain invalid ASN.1 structures an error condition is mishandled. This can result in an infinite loop which consumes system memory", "affected_packages": [ { "package": { @@ -1068,25 +944,13 @@ }, "fixed_version": "0.9.8d", "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": null, - "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d|0.9.6e|0.9.6f|0.9.6g|0.9.6h|0.9.6i|0.9.6j|0.9.6k|0.9.6l|0.9.6m" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-4343", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-2937", "severities": [], - "reference_id": "CVE-2006-4343", + "reference_id": "CVE-2006-2937", "reference_type": "" }, { @@ -1100,12 +964,8 @@ "weaknesses": [] }, { - "unique_content_id": "af7a8ad59af270f7ef97f3219807aacf3e5ef68c009a1a127593c7ed0371393d", - "aliases": [ - "CVE-2007-4995", - "VC-OPENSSL-20071012-CVE-2007-4995" - ], - "summary": "A flaw in DTLS support. An attacker could create a malicious client or server that could trigger a heap overflow. 
This is possibly exploitable to run arbitrary code, but it has not been verified.", + "unique_content_id": "987af90a510832e0adfe428cf642f8b3", + "summary": "A flaw was found in the SSL_get_shared_ciphers() utility function. An attacker could send a list of ciphers to an application that used this function and overrun a buffer with a single byte. Few applications make use of this vulnerable function and generally it is used only when applications are compiled for debugging.", "affected_packages": [ { "package": { @@ -1122,9 +982,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2007-4995", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2007-5135", "severities": [], - "reference_id": "CVE-2007-4995", + "reference_id": "CVE-2007-5135", "reference_type": "" }, { @@ -1138,12 +998,8 @@ "weaknesses": [] }, { - "unique_content_id": "6e1fe5317b3377fba03774a136517301651a683c7bd40e56718a77b14718f8ba", - "aliases": [ - "CVE-2007-5135", - "VC-OPENSSL-20071012-CVE-2007-5135" - ], - "summary": "A flaw was found in the SSL_get_shared_ciphers() utility function. An attacker could send a list of ciphers to an application that used this function and overrun a buffer with a single byte. Few applications make use of this vulnerable function and generally it is used only when applications are compiled for debugging.", + "unique_content_id": "df251bb60bdec54891d4de225180f2ee", + "summary": "A flaw in DTLS support. An attacker could create a malicious client or server that could trigger a heap overflow. 
This is possibly exploitable to run arbitrary code, but it has not been verified.", "affected_packages": [ { "package": { @@ -1160,9 +1016,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2007-5135", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2007-4995", "severities": [], - "reference_id": "CVE-2007-5135", + "reference_id": "CVE-2007-4995", "reference_type": "" }, { @@ -1176,11 +1032,7 @@ "weaknesses": [] }, { - "unique_content_id": "31901d67d2f1a8a6e0558d82580f7223d7f5d8986fa025f202bbc2f8bfbcf282", - "aliases": [ - "CVE-2008-0891", - "VC-OPENSSL-20080528-CVE-2008-0891" - ], + "unique_content_id": "2583bf8ccba8c985bab919b69ccc00e5", "summary": "Testing using the Codenomicon TLS test suite discovered a flaw in the handling of server name extension data in OpenSSL 0.9.8f and OpenSSL 0.9.8g. If OpenSSL has been compiled using the non-default TLS server name extensions, a remote attacker could send a carefully crafted packet to a server application using OpenSSL and cause it to crash.", "affected_packages": [ { @@ -1214,11 +1066,7 @@ "weaknesses": [] }, { - "unique_content_id": "0a025dba94a703c96c56234016505ec5bb2424a29bb0881b837d2a7e0fc0c9a4", - "aliases": [ - "CVE-2008-1672", - "VC-OPENSSL-20080528-CVE-2008-1672" - ], + "unique_content_id": "707840f8f10854ba4abf1409b159f35d", "summary": "Testing using the Codenomicon TLS test suite discovered a flaw if the 'Server Key exchange message' is omitted from a TLS handshake in OpenSSL 0.9.8f and OpenSSL 0.9.8g. 
If a client connects to a malicious server with particular cipher suites, the server could cause the client to crash.", "affected_packages": [ { @@ -1252,11 +1100,7 @@ "weaknesses": [] }, { - "unique_content_id": "7537c1d90dd6c6ff6c065a4a9b2ebd8f7060d69f1c2f4e8d1029c6cd17dbac0c", - "aliases": [ - "CVE-2008-5077", - "VC-OPENSSL-20090107-CVE-2008-5077" - ], + "unique_content_id": "a52c691f587165864b42caa4be445576", "summary": "The Google Security Team discovered several functions inside OpenSSL incorrectly checked the result after calling the EVP_VerifyFinal function, allowing a malformed signature to be treated as a good signature rather than as an error. This issue affected the signature checks on DSA and ECDSA keys used with SSL/TLS. One way to exploit this flaw would be for a remote attacker who is in control of a malicious server or who can use a 'man in the middle' attack to present a malformed SSL/TLS signature from a certificate chain to a vulnerable client, bypassing validation.", "affected_packages": [ { @@ -1290,12 +1134,8 @@ "weaknesses": [] }, { - "unique_content_id": "ec18943f7b002b1a3999bfb8b71078f6c0cc14fadd2a226accc81b7e3c07b57d", - "aliases": [ - "CVE-2009-0590", - "VC-OPENSSL-20090325-CVE-2009-0590" - ], - "summary": "The function ASN1_STRING_print_ex() when used to print a BMPString or UniversalString will crash with an invalid memory access if the encoded length of the string is illegal. Any OpenSSL application which prints out the contents of a certificate could be affected by this bug, including SSL servers, clients and S/MIME software.", + "unique_content_id": "e872aef605740cacbb7547101151f4c7", + "summary": "Fix denial of service flaw due in the DTLS implementation. 
A remote attacker could use this flaw to cause a DTLS server to crash.", "affected_packages": [ { "package": { @@ -1306,33 +1146,29 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8k", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j" + "fixed_version": "0.9.8m", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-0590", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1387", "severities": [], - "reference_id": "CVE-2009-0590", + "reference_id": "CVE-2009-1387", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20090325.txt", + "url": "https://web.archive.org/web/20100710092848/https://rt.openssl.org/Ticket/Display.html?id=1838", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2009-03-25T00:00:00+00:00", + "date_published": "2009-02-05T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "2ca10b0c5e2883828105f49783b0369798b610871a821fd020a9cd541a82539e", - "aliases": [ - "CVE-2009-0591", - "VC-OPENSSL-20090325-CVE-2009-0591" - ], + "unique_content_id": "2b44645ffc6197aaeb99296cc87b3258", "summary": "The function CMS_verify() does not correctly handle an error condition involving malformed signed attributes. This will cause an invalid set of signed attributes to appear valid and content digests will not be checked.", "affected_packages": [ { @@ -1366,11 +1202,7 @@ "weaknesses": [] }, { - "unique_content_id": "f414a498973b8e2d69129426ea6a5e3201efd1b8c5f9f6a4f8f3cba543701cb3", - "aliases": [ - "CVE-2009-0789", - "VC-OPENSSL-20090325-CVE-2009-0789" - ], + "unique_content_id": "4fffdc4369dd44a30fae0836347f91de", "summary": "When a malformed ASN1 structure is received it's contents are freed up and zeroed and an error condition returned. 
On a small number of platforms where sizeof(long) < sizeof(void *) (for example WIN64) this can cause an invalid memory access later resulting in a crash when some invalid structures are read, for example RSA public keys.", "affected_packages": [ { @@ -1404,12 +1236,8 @@ "weaknesses": [] }, { - "unique_content_id": "12e1eced51b649340678cf2d6e9b206e411c2fcd76c9a2d2f4c358b4ce480589", - "aliases": [ - "CVE-2009-1377", - "VC-OPENSSL-20090512-CVE-2009-1377" - ], - "summary": "Fix a denial of service flaw in the DTLS implementation. Records are buffered if they arrive with a future epoch to be processed after finishing the corresponding handshake. There is currently no limitation to this buffer allowing an attacker to perform a DOS attack to a DTLS server by sending records with future epochs until there is no memory left.", + "unique_content_id": "6ec3760bac617981cc8cd2369115f10e", + "summary": "The function ASN1_STRING_print_ex() when used to print a BMPString or UniversalString will crash with an invalid memory access if the encoded length of the string is illegal. 
Any OpenSSL application which prints out the contents of a certificate could be affected by this bug, including SSL servers, clients and S/MIME software.", "affected_packages": [ { "package": { @@ -1420,39 +1248,29 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8m", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l" + "fixed_version": "0.9.8k", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1377", - "severities": [], - "reference_id": "CVE-2009-1377", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/88b48dc68024dcc437da4296c9fb04419b0ccbe1", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-0590", "severities": [], - "reference_id": "", + "reference_id": "CVE-2009-0590", "reference_type": "" }, { - "url": "https://web.archive.org/web/20120306065500/http://rt.openssl.org/Ticket/Display.html?id=1930&user=guest&pass=guest", + "url": "https://www.openssl.org/news/secadv/20090325.txt", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2009-05-12T00:00:00+00:00", + "date_published": "2009-03-25T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "bac66dcd2f0ad0469f600dbec41e0ec28219aab575fd5319a4f6d71675deda30", - "aliases": [ - "CVE-2009-1378", - "VC-OPENSSL-20090512-CVE-2009-1378" - ], + "unique_content_id": "868b6df2d8ffc22c9f9d83fd7da54401", "summary": "Fix a denial of service flaw in the DTLS implementation. In dtls1_process_out_of_seq_message() the check if the current message is already buffered was missing. 
For every new message was memory allocated, allowing an attacker to perform an denial of service attack against a DTLS server by sending out of seq handshake messages until there is no memory left.", "affected_packages": [ { @@ -1492,11 +1310,7 @@ "weaknesses": [] }, { - "unique_content_id": "bd12a0b86dcdd5a9a410597243f1700603dd5cd3ca6f0c40ab08aaeafd7d4edf", - "aliases": [ - "CVE-2009-1379", - "VC-OPENSSL-20090512-CVE-2009-1379" - ], + "unique_content_id": "9233bcc1b091ea2d0fe8d8a2820191f5", "summary": "Use-after-free vulnerability in the dtls1_retrieve_buffered_fragment function could cause a client accessing a malicious DTLS server to crash.", "affected_packages": [ { @@ -1536,12 +1350,8 @@ "weaknesses": [] }, { - "unique_content_id": "b28a70e21f739116e19415a8ce53ecc95060ceacba347960a8292cc70a46762b", - "aliases": [ - "CVE-2009-1386", - "VC-OPENSSL-20090602-CVE-2009-1386" - ], - "summary": "Fix a NULL pointer dereference if a DTLS server recieved ChangeCipherSpec as first record. A remote attacker could use this flaw to cause a DTLS server to crash", + "unique_content_id": "e250eb725e8ae34ba3933779594935f6", + "summary": "Fix a denial of service flaw in the DTLS implementation. Records are buffered if they arrive with a future epoch to be processed after finishing the corresponding handshake. 
There is currently no limitation to this buffer allowing an attacker to perform a DOS attack to a DTLS server by sending records with future epochs until there is no memory left.", "affected_packages": [ { "package": { @@ -1552,34 +1362,36 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8i", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h" + "fixed_version": "0.9.8m", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1386", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1377", "severities": [], - "reference_id": "CVE-2009-1386", + "reference_id": "CVE-2009-1377", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/1cbf663a6c89dcf8f7706d30a8bae675e2e0199a", + "url": "https://github.com/openssl/openssl/commit/88b48dc68024dcc437da4296c9fb04419b0ccbe1", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://web.archive.org/web/20120306065500/http://rt.openssl.org/Ticket/Display.html?id=1930&user=guest&pass=guest", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2009-06-02T00:00:00+00:00", + "date_published": "2009-05-12T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "42f716c07ad6ec9ae3eaece55884154a042ca5fe1ebc7abc0b6bd1e56aabe942", - "aliases": [ - "CVE-2009-1387", - "VC-OPENSSL-20090205-CVE-2009-1387" - ], - "summary": "Fix denial of service flaw due in the DTLS implementation. A remote attacker could use this flaw to cause a DTLS server to crash.", + "unique_content_id": "0097aaf34c70d34f665917931de0a380", + "summary": "Fix a NULL pointer dereference if a DTLS server recieved ChangeCipherSpec as first record. 
A remote attacker could use this flaw to cause a DTLS server to crash", "affected_packages": [ { "package": { @@ -1590,34 +1402,30 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8m", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l" + "fixed_version": "0.9.8i", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1387", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1386", "severities": [], - "reference_id": "CVE-2009-1387", + "reference_id": "CVE-2009-1386", "reference_type": "" }, { - "url": "https://web.archive.org/web/20100710092848/https://rt.openssl.org/Ticket/Display.html?id=1838", + "url": "https://github.com/openssl/openssl/commit/1cbf663a6c89dcf8f7706d30a8bae675e2e0199a", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2009-02-05T00:00:00+00:00", + "date_published": "2009-06-02T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "850ee33c668bfb81f14d0412e4339312cfc05088304246c02b4ec3cf8274f1b1", - "aliases": [ - "CVE-2009-3245", - "VC-OPENSSL-20100223-CVE-2009-3245" - ], - "summary": "It was discovered that OpenSSL did not always check the return value of the bn_wexpand() function. 
An attacker able to trigger a memory allocation failure in that function could cause an application using the OpenSSL library to crash or, possibly, execute arbitrary code", + "unique_content_id": "3ede4a6de30467e840dadb6b1a2f94fc", + "summary": "Implement RFC5746 to address vulnerabilities in SSL/TLS renegotiation.", "affected_packages": [ { "package": { @@ -1634,28 +1442,24 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3245", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3555", "severities": [], - "reference_id": "CVE-2009-3245", + "reference_id": "CVE-2009-3555", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/7e4cae1d2f555cbe9226b377aff4b56c9f7ddd4d", + "url": "https://www.openssl.org/news/secadv/20091111.txt", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2010-02-23T00:00:00+00:00", + "date_published": "2009-11-05T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "e4c27c5b08884c79d2350038aa3ea44e57ac58d20ea4dcf682658288b7ec4268", - "aliases": [ - "CVE-2009-3555", - "VC-OPENSSL-20091105-CVE-2009-3555" - ], - "summary": "Implement RFC5746 to address vulnerabilities in SSL/TLS renegotiation.", + "unique_content_id": "91d6f4b44c2f61e0b1d98cbec9e4633d", + "summary": "A memory leak in the zlib_stateful_finish function in crypto/comp/c_zlib.c allows remote attackers to cause a denial of service via vectors that trigger incorrect calls to the CRYPTO_cleanup_all_ex_data function.", "affected_packages": [ { "package": { @@ -1672,28 +1476,24 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3555", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-4355", "severities": [], - "reference_id": "CVE-2009-3555", + "reference_id": "CVE-2009-4355", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20091111.txt", + "url": 
"https://github.com/openssl/openssl/commit/1b31b5ad560b16e2fe1cad54a755e3e6b5e778a3", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2009-11-05T00:00:00+00:00", + "date_published": "2010-01-13T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "61e80d10d33dde52fc3c7bc32f19fe3763bffef204240f578b490986e1ce7aff", - "aliases": [ - "CVE-2009-4355", - "VC-OPENSSL-20100113-CVE-2009-4355" - ], - "summary": "A memory leak in the zlib_stateful_finish function in crypto/comp/c_zlib.c allows remote attackers to cause a denial of service via vectors that trigger incorrect calls to the CRYPTO_cleanup_all_ex_data function.", + "unique_content_id": "f07be07de5fe8173dc2934d11c36c94d", + "summary": "A missing return value check flaw was discovered in OpenSSL, that could possibly cause OpenSSL to call a Kerberos library function with invalid arguments, resulting in a NULL pointer dereference crash in the MIT Kerberos library. In certain configurations, a remote attacker could use this flaw to crash a TLS/SSL server using OpenSSL by requesting Kerberos cipher suites during the TLS handshake", "affected_packages": [ { "package": { @@ -1704,34 +1504,30 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8m", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l" + "fixed_version": "0.9.8n", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-4355", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-0433", "severities": [], - "reference_id": "CVE-2009-4355", + "reference_id": "CVE-2010-0433", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/1b31b5ad560b16e2fe1cad54a755e3e6b5e778a3", + "url": 
"https://github.com/openssl/openssl/commit/cca1cd9a3447dd067503e4a85ebd1679ee78a48e", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2010-01-13T00:00:00+00:00", + "date_published": "2010-01-19T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "08e65d1f3043871ffe0f802544cb08ac0822cf486e7cb9aebb04b301c46b986c", - "aliases": [ - "CVE-2010-0433", - "VC-OPENSSL-20100119-CVE-2010-0433" - ], - "summary": "A missing return value check flaw was discovered in OpenSSL, that could possibly cause OpenSSL to call a Kerberos library function with invalid arguments, resulting in a NULL pointer dereference crash in the MIT Kerberos library. In certain configurations, a remote attacker could use this flaw to crash a TLS/SSL server using OpenSSL by requesting Kerberos cipher suites during the TLS handshake", + "unique_content_id": "e4f35efada1573e600eeb3f197a9654e", + "summary": "It was discovered that OpenSSL did not always check the return value of the bn_wexpand() function. 
An attacker able to trigger a memory allocation failure in that function could cause an application using the OpenSSL library to crash or, possibly, execute arbitrary code", "affected_packages": [ { "package": { @@ -1742,33 +1538,29 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8n", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m" + "fixed_version": "0.9.8m", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-0433", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3245", "severities": [], - "reference_id": "CVE-2010-0433", + "reference_id": "CVE-2009-3245", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/cca1cd9a3447dd067503e4a85ebd1679ee78a48e", + "url": "https://github.com/openssl/openssl/commit/7e4cae1d2f555cbe9226b377aff4b56c9f7ddd4d", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2010-01-19T00:00:00+00:00", + "date_published": "2010-02-23T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "bd7aef7bfdb58b2311644f5ef6b9fba6252b4ee4823061cce018f34f38e61ac6", - "aliases": [ - "CVE-2010-0740", - "VC-OPENSSL-20100324-CVE-2010-0740" - ], + "unique_content_id": "94276d565fb0e1af8800da5df17f96be", "summary": "In TLS connections, certain incorrectly formatted records can cause an OpenSSL client or server to crash due to a read attempt at NULL.", "affected_packages": [ { @@ -1802,12 +1594,8 @@ "weaknesses": [] }, { - "unique_content_id": "f7669cb060a5572fa05fd4e5dcbb589def9270038f39957489fe982c2b723713", - "aliases": [ - "CVE-2010-0742", - "VC-OPENSSL-20100601-CVE-2010-0742" - ], - "summary": "A flaw in the handling of CMS structures containing OriginatorInfo was found which could lead to a write to 
invalid memory address or double free. CMS support is disabled by default in OpenSSL 0.9.8 versions.", + "unique_content_id": "bfee13b4a1f7df094ab9f172cf3556c9", + "summary": "An invalid Return value check in pkey_rsa_verifyrecover was discovered. When verification recovery fails for RSA keys an uninitialised buffer with an undefined length is returned instead of an error code. This could lead to an information leak.", "affected_packages": [ { "package": { @@ -1820,25 +1608,13 @@ }, "fixed_version": "1.0.0a", "affected_version_range": "vers:openssl/1.0.0" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8o", - "affected_version_range": "vers:openssl/0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-0742", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-1633", "severities": [], - "reference_id": "CVE-2010-0742", + "reference_id": "CVE-2010-1633", "reference_type": "" }, { @@ -1852,12 +1628,8 @@ "weaknesses": [] }, { - "unique_content_id": "806fa09aede3c5095c3bf55d4973cc6160bf7786a6efe3201815ceeb30cccf2b", - "aliases": [ - "CVE-2010-1633", - "VC-OPENSSL-20100601-CVE-2010-1633" - ], - "summary": "An invalid Return value check in pkey_rsa_verifyrecover was discovered. When verification recovery fails for RSA keys an uninitialised buffer with an undefined length is returned instead of an error code. This could lead to an information leak.", + "unique_content_id": "fdfe8fe89fb08b0cedb50a64445793f9", + "summary": "A flaw in the handling of CMS structures containing OriginatorInfo was found which could lead to a write to invalid memory address or double free. 
CMS support is disabled by default in OpenSSL 0.9.8 versions.", "affected_packages": [ { "package": { @@ -1870,13 +1642,25 @@ }, "fixed_version": "1.0.0a", "affected_version_range": "vers:openssl/1.0.0" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8o", + "affected_version_range": "vers:openssl/0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-1633", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-0742", "severities": [], - "reference_id": "CVE-2010-1633", + "reference_id": "CVE-2010-0742", "reference_type": "" }, { @@ -1890,11 +1674,7 @@ "weaknesses": [] }, { - "unique_content_id": "7b65ee41c2d48ae2fc3ab1c1935814347695def01407b8da246cab5018fd4f01", - "aliases": [ - "CVE-2010-3864", - "VC-OPENSSL-20101116-CVE-2010-3864" - ], + "unique_content_id": "76c3ba83fe766ac2a084b0bd3de847f5", "summary": "A flaw in the OpenSSL TLS server extension code parsing which on affected servers can be exploited in a buffer overrun attack. Any OpenSSL based TLS server is vulnerable if it is multi-threaded and uses OpenSSL's internal caching mechanism. Servers that are multi-process and/or disable internal session caching are NOT affected.", "affected_packages": [ { @@ -1940,11 +1720,7 @@ "weaknesses": [] }, { - "unique_content_id": "e0c32279e2afef8a7c959758dd603e340e8b3ae83744f2af395802b4d7152546", - "aliases": [ - "CVE-2010-4180", - "VC-OPENSSL-20101202-CVE-2010-4180" - ], + "unique_content_id": "316f2dc208adb956396af86e8d35c818", "summary": "A flaw in the OpenSSL SSL/TLS server code where an old bug workaround allows malicious clients to modify the stored session cache ciphersuite. In some cases the ciphersuite can be downgraded to a weaker one on subsequent connections. 
This issue only affects OpenSSL based SSL/TLS server if it uses OpenSSL's internal caching mechanisms and the SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG flag (many applications enable this by using the SSL_OP_ALL option).", "affected_packages": [ { @@ -1990,11 +1766,7 @@ "weaknesses": [] }, { - "unique_content_id": "93fa5cf53d6cabf247c30a66821d9a5e07a1013f64a2417d5e26ac28581c4301", - "aliases": [ - "CVE-2010-4252", - "VC-OPENSSL-20101202-CVE-2010-4252" - ], + "unique_content_id": "a65500a311ab1c4e556fa47df1b487e1", "summary": "An error in OpenSSL's experimental J-PAKE implementation which could lead to successful validation by someone with no knowledge of the shared secret. The OpenSSL Team still consider the implementation of J-PAKE to be experimental and is not compiled by default.", "affected_packages": [ { @@ -2028,61 +1800,7 @@ "weaknesses": [] }, { - "unique_content_id": "23f38bdcf51ed382203722a20b7d4821569824f9d019c122bf958aa76dd50613", - "aliases": [ - "CVE-2010-5298", - "VC-OPENSSL-20140408-CVE-2010-5298" - ], - "summary": "A race condition in the ssl3_read_bytes function can allow remote attackers to inject data across sessions or cause a denial of service. 
This flaw only affects multithreaded applications using OpenSSL 1.0.0 and 1.0.1, where SSL_MODE_RELEASE_BUFFERS is enabled, which is not the default and not common.", - "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0m", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1h", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-5298", - "severities": [], - "reference_id": "CVE-2010-5298", - "reference_type": "" - }, - { - "url": "https://www.openssl.org/news/secadv/20140605.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2014-04-08T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "b9846e705257211137a5d75434ca61d87844c9fae7bc25a5a943b397a57a32c2", - "aliases": [ - "CVE-2011-0014", - "VC-OPENSSL-20110208-CVE-2011-0014" - ], + "unique_content_id": "4d2690fa788437a1517d397eabb14249", "summary": "A buffer over-read flaw was discovered in the way OpenSSL parsed the Certificate Status Request TLS extensions in ClientHello TLS handshake messages. 
A remote attacker could possibly use this flaw to crash an SSL server using the affected OpenSSL functionality.", "affected_packages": [ { @@ -2128,11 +1846,7 @@ "weaknesses": [] }, { - "unique_content_id": "ceda83e23c529430797c0b2affbe99cfbd68a5919628c3a8921070972ad425d3", - "aliases": [ - "CVE-2011-3207", - "VC-OPENSSL-20110906-CVE-2011-3207" - ], + "unique_content_id": "220a4682b4ef1cc32a29898f3057b9b3", "summary": "Under certain circumstances OpenSSL's internal certificate verification routines can incorrectly accept a CRL whose nextUpdate field is in the past. Applications are only affected by the CRL checking vulnerability if they enable OpenSSL's internal CRL checking which is off by default. Applications which use their own custom CRL checking (such as Apache) are not affected.", "affected_packages": [ { @@ -2166,11 +1880,7 @@ "weaknesses": [] }, { - "unique_content_id": "63385b83187d8305d4b3a99688f51116e1e99e77469a4de02e39611bbc58cf10", - "aliases": [ - "CVE-2011-3210", - "VC-OPENSSL-20110906-CVE-2011-3210" - ], + "unique_content_id": "990e85544590d4e2411449cfbc182afd", "summary": "OpenSSL server code for ephemeral ECDH ciphersuites is not thread-safe, and furthermore can crash if a client violates the protocol by sending handshake messages in incorrect order. 
Only server-side applications that specifically support ephemeral ECDH ciphersuites are affected, and only if ephemeral ECDH ciphersuites are enabled in the configuration.", "affected_packages": [ { @@ -2216,12 +1926,8 @@ "weaknesses": [] }, { - "unique_content_id": "392d936885fcdae2fb2b4200be4c4dbe8cb7fef88164723777c37de37b84d573", - "aliases": [ - "CVE-2011-4108", - "VC-OPENSSL-20120104-CVE-2011-4108" - ], - "summary": "OpenSSL was susceptable an extension of the Vaudenay padding oracle attack on CBC mode encryption which enables an efficient plaintext recovery attack against the OpenSSL implementation of DTLS by exploiting timing differences arising during decryption processing.", + "unique_content_id": "48361e01b38b28352705c300f7ee407b", + "summary": "Support for handshake restarts for server gated cryptograpy (SGC) can be used in a denial-of-service attack.", "affected_packages": [ { "package": { @@ -2250,9 +1956,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4108", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4619", "severities": [], - "reference_id": "CVE-2011-4108", + "reference_id": "CVE-2011-4619", "reference_type": "" }, { @@ -2266,11 +1972,7 @@ "weaknesses": [] }, { - "unique_content_id": "617f7a0525e9e761eae4eb9c93e690fabebd6717a3295b104064c694207f1897", - "aliases": [ - "CVE-2011-4109", - "VC-OPENSSL-20120104-CVE-2011-4109" - ], + "unique_content_id": "5459b1f4a775b3122cdb0ec3ad815b3d", "summary": "If X509_V_FLAG_POLICY_CHECK is set in OpenSSL 0.9.8, then a policy check failure can lead to a double-free. The bug does not occur unless this flag is set. 
Users of OpenSSL 1.0.0 are not affected", "affected_packages": [ { @@ -2304,12 +2006,8 @@ "weaknesses": [] }, { - "unique_content_id": "985ab2093b4bed8444751c8a5f106add9b1f71fefbe400f56ff4a34d7fc29d00", - "aliases": [ - "CVE-2011-4576", - "VC-OPENSSL-20120104-CVE-2011-4576" - ], - "summary": "OpenSSL failed to clear the bytes used as block cipher padding in SSL 3.0 records which could leak the contents of memory in some circumstances.", + "unique_content_id": "578281e8060ac1dc67b9d229e4b003ab", + "summary": "RFC 3779 data can be included in certificates, and if it is malformed, may trigger an assertion failure. This could be used in a denial-of-service attack. Builds of OpenSSL are only vulnerable if configured with \"enable-rfc3779\", which is not a default.", "affected_packages": [ { "package": { @@ -2338,9 +2036,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4576", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4577", "severities": [], - "reference_id": "CVE-2011-4576", + "reference_id": "CVE-2011-4577", "reference_type": "" }, { @@ -2354,12 +2052,8 @@ "weaknesses": [] }, { - "unique_content_id": "a75d293b72e75c3618655c718811f59a039e176e1592a13e7fc6a723dd4003d6", - "aliases": [ - "CVE-2011-4577", - "VC-OPENSSL-20120104-CVE-2011-4577" - ], - "summary": "RFC 3779 data can be included in certificates, and if it is malformed, may trigger an assertion failure. This could be used in a denial-of-service attack. Builds of OpenSSL are only vulnerable if configured with \"enable-rfc3779\", which is not a default.", + "unique_content_id": "5af91d2aece046ccf3bc688d3dff09d5", + "summary": "A flaw in the fix to CVE-2011-4108 can be exploited in a denial of service attack. 
Only DTLS applications are affected.", "affected_packages": [ { "package": { @@ -2370,8 +2064,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8s", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" + "fixed_version": "0.9.8t", + "affected_version_range": "vers:openssl/0.9.8s" }, { "package": { @@ -2382,19 +2076,19 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0f", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e" + "fixed_version": "1.0.0g", + "affected_version_range": "vers:openssl/1.0.0f" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4577", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-0050", "severities": [], - "reference_id": "CVE-2011-4577", + "reference_id": "CVE-2012-0050", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20120104.txt", + "url": "https://www.openssl.org/news/secadv/20120118.txt", "severities": [], "reference_id": "", "reference_type": "" @@ -2404,12 +2098,8 @@ "weaknesses": [] }, { - "unique_content_id": "c10f7480d6e0decea7f1d9b9884ea97b04025caa0c39bbc1338955d9ac46b48d", - "aliases": [ - "CVE-2011-4619", - "VC-OPENSSL-20120104-CVE-2011-4619" - ], - "summary": "Support for handshake restarts for server gated cryptograpy (SGC) can be used in a denial-of-service attack.", + "unique_content_id": "6a353734271d92996f12a08fde03f7bb", + "summary": "OpenSSL was susceptable an extension of the Vaudenay padding oracle attack on CBC mode encryption which enables an efficient plaintext recovery attack against the OpenSSL implementation of DTLS by exploiting timing differences arising during decryption processing.", "affected_packages": [ { "package": { @@ -2438,9 +2128,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4619", + "url": 
"https://nvd.nist.gov/vuln/detail/CVE-2011-4108", "severities": [], - "reference_id": "CVE-2011-4619", + "reference_id": "CVE-2011-4108", "reference_type": "" }, { @@ -2454,12 +2144,8 @@ "weaknesses": [] }, { - "unique_content_id": "b98fd56170c94c5fe71a1823c88ad50a789a513aa656f1cef217a11c83d645b7", - "aliases": [ - "CVE-2012-0027", - "VC-OPENSSL-20120104-CVE-2012-0027" - ], - "summary": "A malicious TLS client can send an invalid set of GOST parameters which will cause the server to crash due to lack of error checking. This could be used in a denial-of-service attack. Only users of the OpenSSL GOST ENGINE are affected by this bug.", + "unique_content_id": "85e39cd316fb40cbdc47d19d1f93fade", + "summary": "OpenSSL failed to clear the bytes used as block cipher padding in SSL 3.0 records which could leak the contents of memory in some circumstances.", "affected_packages": [ { "package": { @@ -2470,15 +2156,27 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0f", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e" - } - ], - "references": [ + "fixed_version": "0.9.8s", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" + }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-0027", + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0f", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4576", "severities": [], - "reference_id": "CVE-2012-0027", + "reference_id": "CVE-2011-4576", "reference_type": "" }, { @@ -2492,12 +2190,8 @@ "weaknesses": [] }, { - "unique_content_id": "525a3a5ff9914fd1388fdd071f143b794e6c642f2e45beb7d7d0bc49a78057a3", - "aliases": [ - "CVE-2012-0050", - 
"VC-OPENSSL-20120104-CVE-2012-0050" - ], - "summary": "A flaw in the fix to CVE-2011-4108 can be exploited in a denial of service attack. Only DTLS applications are affected.", + "unique_content_id": "9c9b9e8b9a5f1a355656382f71722432", + "summary": "A malicious TLS client can send an invalid set of GOST parameters which will cause the server to crash due to lack of error checking. This could be used in a denial-of-service attack. Only users of the OpenSSL GOST ENGINE are affected by this bug.", "affected_packages": [ { "package": { @@ -2508,31 +2202,19 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8t", - "affected_version_range": "vers:openssl/0.9.8s" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0g", - "affected_version_range": "vers:openssl/1.0.0f" + "fixed_version": "1.0.0f", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-0050", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-0027", "severities": [], - "reference_id": "CVE-2012-0050", + "reference_id": "CVE-2012-0027", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20120118.txt", + "url": "https://www.openssl.org/news/secadv/20120104.txt", "severities": [], "reference_id": "", "reference_type": "" @@ -2542,11 +2224,7 @@ "weaknesses": [] }, { - "unique_content_id": "c60189dfbd7ddb73a1d2a470d59fa6fcb7bedad776a8d717a0cbca7d3b416095", - "aliases": [ - "CVE-2012-0884", - "VC-OPENSSL-20120312-CVE-2012-0884" - ], + "unique_content_id": "6744bf2a3fd6eba6b18d59fb648d443b", "summary": "A weakness in the OpenSSL CMS and PKCS #7 code can be exploited using Bleichenbacher's attack on PKCS #1 v1.5 RSA padding also known as the million message attack (MMA). 
Only users of CMS, PKCS #7, or S/MIME decryption operations are affected, SSL/TLS applications are not affected by this issue.", "affected_packages": [ { @@ -2592,11 +2270,7 @@ "weaknesses": [] }, { - "unique_content_id": "9d1e4715f7138b1a78fbf5251551b5d200ccd9ec52515b1b2939757df362997b", - "aliases": [ - "CVE-2012-2110", - "VC-OPENSSL-20120419-CVE-2012-2110" - ], + "unique_content_id": "c4e836c345751d38a3bff43c10e5a655", "summary": "Multiple numeric conversion errors, leading to a buffer overflow, were found in the way OpenSSL parsed ASN.1 (Abstract Syntax Notation One) data from BIO (OpenSSL's I/O abstraction) inputs. Specially-crafted DER (Distinguished Encoding Rules) encoded data read from a file or other BIO input could cause an application using the OpenSSL library to crash or, potentially, execute arbitrary code.", "affected_packages": [ { @@ -2654,11 +2328,7 @@ "weaknesses": [] }, { - "unique_content_id": "ea921fcdf273dfa8a452dab36604e137574b2bd9234e81b08a4885a267939e64", - "aliases": [ - "CVE-2012-2131", - "VC-OPENSSL-20120424-CVE-2012-2131" - ], + "unique_content_id": "7451866670acf0bd4a5f0c9d74bdfb18", "summary": "It was discovered that the fix for CVE-2012-2110 released on 19 Apr 2012 was not sufficient to correct the issue for OpenSSL 0.9.8. This issue only affects OpenSSL 0.9.8v. OpenSSL 1.0.1a and 1.0.0i already contain a patch sufficient to correct CVE-2012-2110.", "affected_packages": [ { @@ -2692,11 +2362,7 @@ "weaknesses": [] }, { - "unique_content_id": "0fd2dc9500a45c761c7a6ddadcaca6403b0dcaefd25ec7c8a9a2e4dba0211efe", - "aliases": [ - "CVE-2012-2333", - "VC-OPENSSL-20120510-CVE-2012-2333" - ], + "unique_content_id": "9a9efe32bb6fb903c9814b808b7f0206", "summary": "An integer underflow flaw, leading to a buffer over-read, was found in the way OpenSSL handled TLS 1.1, TLS 1.2, and DTLS (Datagram Transport Layer Security) application data record lengths when using a block cipher in CBC (cipher-block chaining) mode. 
A malicious TLS 1.1, TLS 1.2, or DTLS client or server could use this flaw to crash its connection peer.", "affected_packages": [ { @@ -2754,13 +2420,33 @@ "weaknesses": [] }, { - "unique_content_id": "37fd821acfb83d5e24554010a0319b02b5c7c1c552d4dba2918bb1047836ed2c", - "aliases": [ - "CVE-2012-2686", - "VC-OPENSSL-20130205-CVE-2012-2686" - ], - "summary": "A flaw in the OpenSSL handling of CBC ciphersuites in TLS 1.1 and TLS 1.2 on AES-NI supporting platforms can be exploited in a DoS attack.", + "unique_content_id": "d1003ac6fdcb1a2a4d7bca936e239b42", + "summary": "A weakness in the handling of CBC ciphersuites in SSL, TLS and DTLS which could lead to plaintext recovery by exploiting timing differences arising during MAC processing.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8y", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0k", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j" + }, { "package": { "name": "openssl", @@ -2776,9 +2462,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-2686", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-0169", "severities": [], - "reference_id": "CVE-2012-2686", + "reference_id": "CVE-2013-0169", "reference_type": "" }, { @@ -2788,15 +2474,11 @@ "reference_type": "" } ], - "date_published": "2013-02-05T00:00:00+00:00", + "date_published": "2013-02-04T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": 
"fcd18f8ddd7c4c680932ce9d21da72cd35ad71fe163ce5734f136cf4d1913002", - "aliases": [ - "CVE-2013-0166", - "VC-OPENSSL-20130205-CVE-2013-0166" - ], + "unique_content_id": "084bb9ad1da9dafc260f041cfdaf868e", "summary": "A flaw in the OpenSSL handling of OCSP response verification can be exploited in a denial of service attack.", "affected_packages": [ { @@ -2854,37 +2536,9 @@ "weaknesses": [] }, { - "unique_content_id": "274bafa8474e5913afcb27cc6ffde809fb6f6ba505f13df3234f8ee946e218ee", - "aliases": [ - "CVE-2013-0169", - "VC-OPENSSL-20130204-CVE-2013-0169" - ], - "summary": "A weakness in the handling of CBC ciphersuites in SSL, TLS and DTLS which could lead to plaintext recovery by exploiting timing differences arising during MAC processing.", + "unique_content_id": "9c755e2b9ac36e9d77e7aa63ca6b91e5", + "summary": "A flaw in the OpenSSL handling of CBC ciphersuites in TLS 1.1 and TLS 1.2 on AES-NI supporting platforms can be exploited in a DoS attack.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8y", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0k", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j" - }, { "package": { "name": "openssl", @@ -2900,9 +2554,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-0169", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-2686", "severities": [], - "reference_id": "CVE-2013-0169", + "reference_id": "CVE-2012-2686", "reference_type": "" }, { @@ -2912,17 +2566,25 @@ 
"reference_type": "" } ], - "date_published": "2013-02-04T00:00:00+00:00", + "date_published": "2013-02-05T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "98a0e5556bb1bf1ef2d84156a75154a169ffed9e73af5bedc7e7d76c7e2dda3c", - "aliases": [ - "CVE-2013-4353", - "VC-OPENSSL-20140106-CVE-2013-4353" - ], - "summary": "A carefully crafted invalid TLS handshake could crash OpenSSL with a NULL pointer exception. A malicious server could use this flaw to crash a connecting client. This issue only affected OpenSSL 1.0.1 versions.", + "unique_content_id": "cd972700acea991417121019f009bac1", + "summary": "A flaw in DTLS handling can cause an application using OpenSSL and DTLS to crash. This is not a vulnerability for OpenSSL prior to 1.0.0.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0l", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k" + }, { "package": { "name": "openssl", @@ -2938,27 +2600,23 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-4353", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-6450", "severities": [], - "reference_id": "CVE-2013-4353", + "reference_id": "CVE-2013-6450", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/197e0ea817ad64820789d86711d55ff50d71f631", + "url": "https://github.com/openssl/openssl/commit/3462896", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2014-01-06T00:00:00+00:00", + "date_published": "2013-12-13T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "7628f9cd3cb03285c9bfdbb9b7dc222f54c2e5ae9498bce55eb751f6dfce660d", - "aliases": [ - "CVE-2013-6449", - "VC-OPENSSL-20131214-CVE-2013-6449" - ], + "unique_content_id": "bd7c16b098a35e13b1659e8c4934253d", "summary": "A flaw in 
OpenSSL can cause an application using OpenSSL to crash when using TLS version 1.2. This issue only affected OpenSSL 1.0.1 versions.", "affected_packages": [ { @@ -2992,25 +2650,9 @@ "weaknesses": [] }, { - "unique_content_id": "bc2e1522ce53f1d9658df6561b069413fd1a1e237b8d127da67a245315e1763f", - "aliases": [ - "CVE-2013-6450", - "VC-OPENSSL-20131213-CVE-2013-6450" - ], - "summary": "A flaw in DTLS handling can cause an application using OpenSSL and DTLS to crash. This is not a vulnerability for OpenSSL prior to 1.0.0.", + "unique_content_id": "0e3a3a12e8060b9395fe7b48a7276377", + "summary": "A carefully crafted invalid TLS handshake could crash OpenSSL with a NULL pointer exception. A malicious server could use this flaw to crash a connecting client. This issue only affected OpenSSL 1.0.1 versions.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0l", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k" - }, { "package": { "name": "openssl", @@ -3026,27 +2668,23 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-6450", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-4353", "severities": [], - "reference_id": "CVE-2013-6450", + "reference_id": "CVE-2013-4353", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/3462896", + "url": "https://github.com/openssl/openssl/commit/197e0ea817ad64820789d86711d55ff50d71f631", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2013-12-13T00:00:00+00:00", + "date_published": "2014-01-06T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "84057cab1e58fea9c99a32830b1f9459f608e4a1842a5c621e56d7570923cad5", - "aliases": [ - "CVE-2014-0076", - "VC-OPENSSL-20140214-CVE-2014-0076" - ], + "unique_content_id": 
"5d7762928fe0665ff593f8b93f0f7c2d", "summary": "Fix for the attack described in the paper \"Recovering OpenSSL ECDSA Nonces Using the FLUSH+RELOAD Cache Side-channel Attack\"", "affected_packages": [ { @@ -3116,11 +2754,7 @@ "weaknesses": [] }, { - "unique_content_id": "757f04cde75470cb2bec8053f5fc874a82bae6b35945ec483df2e28eeb0cfc78", - "aliases": [ - "CVE-2014-0160", - "VC-OPENSSL-20140407-CVE-2014-0160" - ], + "unique_content_id": "3b9f07c3f3fc9a3177b7cba6994626f2", "summary": "A missing bounds check in the handling of the TLS heartbeat extension can be used to reveal up to 64kB of memory to a connected client or server (a.k.a. Heartbleed). This issue did not affect versions of OpenSSL prior to 1.0.1.", "affected_packages": [ { @@ -3154,25 +2788,9 @@ "weaknesses": [] }, { - "unique_content_id": "e09b36d835f2209f6be06a5138c917c4210c32191bef1c9dc5a2faa1f8850e32", - "aliases": [ - "CVE-2014-0195", - "VC-OPENSSL-20140605-CVE-2014-0195" - ], - "summary": "A buffer overrun attack can be triggered by sending invalid DTLS fragments to an OpenSSL DTLS client or server. This is potentially exploitable to run arbitrary code on a vulnerable client or server. Only applications using OpenSSL as a DTLS client or server affected.", + "unique_content_id": "5c73df85af33b3649d0f8f5cf48465d3", + "summary": "A race condition in the ssl3_read_bytes function can allow remote attackers to inject data across sessions or cause a denial of service. 
This flaw only affects multithreaded applications using OpenSSL 1.0.0 and 1.0.1, where SSL_MODE_RELEASE_BUFFERS is enabled, which is not the default and not common.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8za", - "affected_version_range": "vers:openssl/0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y" - }, { "package": { "name": "openssl", @@ -3200,9 +2818,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0195", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-5298", "severities": [], - "reference_id": "CVE-2014-0195", + "reference_id": "CVE-2010-5298", "reference_type": "" }, { @@ -3212,15 +2830,11 @@ "reference_type": "" } ], - "date_published": "2014-06-05T00:00:00+00:00", + "date_published": "2014-04-08T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "156f765a217953dbd4da2ecb89c9f1998f67752ff9a12bbb575d396f7f8902a2", - "aliases": [ - "CVE-2014-0198", - "VC-OPENSSL-20140421-CVE-2014-0198" - ], + "unique_content_id": "ee4174c785ef4de123c8f5c8c4fbf9b2", "summary": "A flaw in the do_ssl3_write function can allow remote attackers to cause a denial of service via a NULL pointer dereference. This flaw only affects OpenSSL 1.0.0 and 1.0.1 where SSL_MODE_RELEASE_BUFFERS is enabled, which is not the default and not common.", "affected_packages": [ { @@ -3266,12 +2880,8 @@ "weaknesses": [] }, { - "unique_content_id": "1220fb598061d81d0d92e10093d9cf1e9de722b48ce1e08513ff839410106623", - "aliases": [ - "CVE-2014-0221", - "VC-OPENSSL-20140605-CVE-2014-0221" - ], - "summary": "By sending an invalid DTLS handshake to an OpenSSL DTLS client the code can be made to recurse eventually crashing in a DoS attack. 
Only applications using OpenSSL as a DTLS client are affected.", + "unique_content_id": "4e8f724565b6429137ea959defa72090", + "summary": "OpenSSL TLS clients enabling anonymous ECDH ciphersuites are subject to a denial of service attack.", "affected_packages": [ { "package": { @@ -3312,9 +2922,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0221", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3470", "severities": [], - "reference_id": "CVE-2014-0221", + "reference_id": "CVE-2014-3470", "reference_type": "" }, { @@ -3324,15 +2934,11 @@ "reference_type": "" } ], - "date_published": "2014-06-05T00:00:00+00:00", + "date_published": "2014-05-30T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "8e650cb3afbf00bdf5312f07bb03de889b8709b53e66779f3ff6664d49f060cb", - "aliases": [ - "CVE-2014-0224", - "VC-OPENSSL-20140605-CVE-2014-0224" - ], + "unique_content_id": "a73f61be805e75d9468e11afb3158d45", "summary": "An attacker can force the use of weak keying material in OpenSSL SSL/TLS clients and servers. This can be exploited by a Man-in-the-middle (MITM) attack where the attacker can decrypt and modify traffic from the attacked client and server.", "affected_packages": [ { @@ -3390,12 +2996,8 @@ "weaknesses": [] }, { - "unique_content_id": "3af893757d5d17f3214542da1f1511d519cfbcda8bc5691a205aadb469f130f3", - "aliases": [ - "CVE-2014-3470", - "VC-OPENSSL-20140530-CVE-2014-3470" - ], - "summary": "OpenSSL TLS clients enabling anonymous ECDH ciphersuites are subject to a denial of service attack.", + "unique_content_id": "bc12de8c2221021ccb7c3659b08cd3f5", + "summary": "By sending an invalid DTLS handshake to an OpenSSL DTLS client the code can be made to recurse eventually crashing in a DoS attack. 
Only applications using OpenSSL as a DTLS client are affected.", "affected_packages": [ { "package": { @@ -3436,9 +3038,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3470", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0221", "severities": [], - "reference_id": "CVE-2014-3470", + "reference_id": "CVE-2014-0221", "reference_type": "" }, { @@ -3448,16 +3050,12 @@ "reference_type": "" } ], - "date_published": "2014-05-30T00:00:00+00:00", + "date_published": "2014-06-05T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "73269c9023356431d683604381118286fb8aeddcd87d0151e488b7255fa89f2c", - "aliases": [ - "CVE-2014-3505", - "VC-OPENSSL-20140806-CVE-2014-3505" - ], - "summary": "A Double Free was found when processing DTLS packets. An attacker can force an error condition which causes openssl to crash whilst processing DTLS packets due to memory being freed twice. This could lead to a Denial of Service attack.", + "unique_content_id": "da21b7edec2a01bd2495586e3e344a2c", + "summary": "A buffer overrun attack can be triggered by sending invalid DTLS fragments to an OpenSSL DTLS client or server. This is potentially exploitable to run arbitrary code on a vulnerable client or server. 
Only applications using OpenSSL as a DTLS client or server affected.", "affected_packages": [ { "package": { @@ -3468,8 +3066,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8zb", - "affected_version_range": "vers:openssl/0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" + "fixed_version": "0.9.8za", + "affected_version_range": "vers:openssl/0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y" }, { "package": { @@ -3480,8 +3078,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0n", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" + "fixed_version": "1.0.0m", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l" }, { "package": { @@ -3492,34 +3090,30 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1i", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h" + "fixed_version": "1.0.1h", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3505", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0195", "severities": [], - "reference_id": "CVE-2014-3505", + "reference_id": "CVE-2014-0195", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20140806.txt", + "url": "https://www.openssl.org/news/secadv/20140605.txt", "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2014-08-06T00:00:00+00:00", + "date_published": "2014-06-05T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "c96902798094fef86133d6163da3a0ef8e16161941fb0c9987451c3856334da2", - "aliases": [ - "CVE-2014-3506", - "VC-OPENSSL-20140806-CVE-2014-3506" - ], - "summary": "A DTLS flaw leading to memory 
exhaustion was found. An attacker can force openssl to consume large amounts of memory whilst processing DTLS handshake messages. This could lead to a Denial of Service attack.", + "unique_content_id": "073034548d58e9674b4080cd0c36f8cb", + "summary": "A flaw in handling DTLS anonymous EC(DH) ciphersuites was found. OpenSSL DTLS clients enabling anonymous (EC)DH ciphersuites are subject to a denial of service attack. A malicious server can crash the client with a null pointer dereference (read) by specifying an anonymous (EC)DH ciphersuite and sending carefully crafted handshake messages.", "affected_packages": [ { "package": { @@ -3560,9 +3154,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3506", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3510", "severities": [], - "reference_id": "CVE-2014-3506", + "reference_id": "CVE-2014-3510", "reference_type": "" }, { @@ -3576,12 +3170,8 @@ "weaknesses": [] }, { - "unique_content_id": "2947a778fbea64d8f99d370af3a8d0169602ff5adff88d86ccf57a09c3fb556c", - "aliases": [ - "CVE-2014-3507", - "VC-OPENSSL-20140806-CVE-2014-3507" - ], - "summary": "A DTLS memory leak from zero-length fragments was found. By sending carefully crafted DTLS packets an attacker could cause OpenSSL to leak memory. This could lead to a Denial of Service attack.", + "unique_content_id": "27f89a41dfab2654a12a2d701b68ad9c", + "summary": "A DTLS flaw leading to memory exhaustion was found. An attacker can force openssl to consume large amounts of memory whilst processing DTLS handshake messages. 
This could lead to a Denial of Service attack.", "affected_packages": [ { "package": { @@ -3593,7 +3183,7 @@ "qualifiers": "" }, "fixed_version": "0.9.8zb", - "affected_version_range": "vers:openssl/0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" }, { "package": { @@ -3605,7 +3195,7 @@ "qualifiers": "" }, "fixed_version": "1.0.0n", - "affected_version_range": "vers:openssl/1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" }, { "package": { @@ -3622,9 +3212,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3507", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3506", "severities": [], - "reference_id": "CVE-2014-3507", + "reference_id": "CVE-2014-3506", "reference_type": "" }, { @@ -3638,12 +3228,8 @@ "weaknesses": [] }, { - "unique_content_id": "e111b3c925ff4930bf9df47e3c68ad219bfc78029011f026f5db9dfcb3623cba", - "aliases": [ - "CVE-2014-3508", - "VC-OPENSSL-20140806-CVE-2014-3508" - ], - "summary": "A flaw in OBJ_obj2txt may cause pretty printing functions such as X509_name_oneline, X509_name_print_ex, to leak some information from the stack. Applications may be affected if they echo pretty printing output to the attacker. OpenSSL SSL/TLS clients and servers themselves are not affected.", + "unique_content_id": "48ecff4dbadf3f99198fcfb4138048d8", + "summary": "A DTLS memory leak from zero-length fragments was found. By sending carefully crafted DTLS packets an attacker could cause OpenSSL to leak memory. 
This could lead to a Denial of Service attack.", "affected_packages": [ { "package": { @@ -3655,7 +3241,7 @@ "qualifiers": "" }, "fixed_version": "0.9.8zb", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" + "affected_version_range": "vers:openssl/0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" }, { "package": { @@ -3667,7 +3253,7 @@ "qualifiers": "" }, "fixed_version": "1.0.0n", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" + "affected_version_range": "vers:openssl/1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" }, { "package": { @@ -3684,9 +3270,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3508", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3507", "severities": [], - "reference_id": "CVE-2014-3508", + "reference_id": "CVE-2014-3507", "reference_type": "" }, { @@ -3700,25 +3286,9 @@ "weaknesses": [] }, { - "unique_content_id": "d999097e03330b37701e1a362f85711c43272b4fc8606896b221ff2c09a6f5cb", - "aliases": [ - "CVE-2014-3509", - "VC-OPENSSL-20140806-CVE-2014-3509" - ], - "summary": "A race condition was found in ssl_parse_serverhello_tlsext. If a multithreaded client connects to a malicious server using a resumed session and the server sends an ec point format extension, it could write up to 255 bytes to freed memory.", + "unique_content_id": "4c289b7168ed3ac1dc649dd94e296ee2", + "summary": "A SRP buffer overrun was found. A malicious client or server can send invalid SRP parameters and overrun an internal buffer. 
Only applications which are explicitly set up for SRP use are affected.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0n", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" - }, { "package": { "name": "openssl", @@ -3734,9 +3304,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3509", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3512", "severities": [], - "reference_id": "CVE-2014-3509", + "reference_id": "CVE-2014-3512", "reference_type": "" }, { @@ -3750,12 +3320,8 @@ "weaknesses": [] }, { - "unique_content_id": "f3294bb2b90c0dac71eb21010721728aa9fbaf64cd7b1aff3bbe97099e5db16e", - "aliases": [ - "CVE-2014-3510", - "VC-OPENSSL-20140806-CVE-2014-3510" - ], - "summary": "A flaw in handling DTLS anonymous EC(DH) ciphersuites was found. OpenSSL DTLS clients enabling anonymous (EC)DH ciphersuites are subject to a denial of service attack. A malicious server can crash the client with a null pointer dereference (read) by specifying an anonymous (EC)DH ciphersuite and sending carefully crafted handshake messages.", + "unique_content_id": "913ba8a6e88c02283428f89a6d24952b", + "summary": "A Double Free was found when processing DTLS packets. An attacker can force an error condition which causes openssl to crash whilst processing DTLS packets due to memory being freed twice. 
This could lead to a Denial of Service attack.", "affected_packages": [ { "package": { @@ -3767,7 +3333,7 @@ "qualifiers": "" }, "fixed_version": "0.9.8zb", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" + "affected_version_range": "vers:openssl/0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" }, { "package": { @@ -3796,9 +3362,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3510", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3505", "severities": [], - "reference_id": "CVE-2014-3510", + "reference_id": "CVE-2014-3505", "reference_type": "" }, { @@ -3812,11 +3378,7 @@ "weaknesses": [] }, { - "unique_content_id": "99661f6b61c2befbf0a840ac395f67ae171810c041a0400837c4e202fff1c6ef", - "aliases": [ - "CVE-2014-3511", - "VC-OPENSSL-20140806-CVE-2014-3511" - ], + "unique_content_id": "a5d66943f85ab01f18b1181d5dccceb3", "summary": "A flaw in the OpenSSL SSL/TLS server code causes the server to negotiate TLS 1.0 instead of higher protocol versions when the ClientHello message is badly fragmented. This allows a man-in-the-middle attacker to force a downgrade to TLS 1.0 even if both the server and the client support a higher protocol version, by modifying the client's TLS records.", "affected_packages": [ { @@ -3850,13 +3412,55 @@ "weaknesses": [] }, { - "unique_content_id": "52bc0907465cbad85c1cf82eecf18885bbbe24de573bda4cdb9f8367f269a783", - "aliases": [ - "CVE-2014-3512", - "VC-OPENSSL-20140806-CVE-2014-3512" + "unique_content_id": "cca76ec7e4ca1da60dc37bfb7065a74d", + "summary": "A crash was found affecting SRP ciphersuites used in a Server Hello message. 
The issue affects OpenSSL clients and allows a malicious server to crash the client with a null pointer dereference (read) by specifying an SRP ciphersuite even though it was not properly negotiated with the client. This could lead to a Denial of Service.", + "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1i", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h" + } ], - "summary": "A SRP buffer overrun was found. A malicious client or server can send invalid SRP parameters and overrun an internal buffer. Only applications which are explicitly set up for SRP use are affected.", + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-5139", + "severities": [], + "reference_id": "CVE-2014-5139", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20140806.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2014-08-06T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "db4e7a865c812a2f137555357a4ea54a", + "summary": "A race condition was found in ssl_parse_serverhello_tlsext. 
If a multithreaded client connects to a malicious server using a resumed session and the server sends an ec point format extension, it could write up to 255 bytes to freed memory.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0n", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" + }, { "package": { "name": "openssl", @@ -3872,9 +3476,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3512", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3509", "severities": [], - "reference_id": "CVE-2014-3512", + "reference_id": "CVE-2014-3509", "reference_type": "" }, { @@ -3888,11 +3492,65 @@ "weaknesses": [] }, { - "unique_content_id": "c54c6fed589f1ca8024f1917126aae2983baa39871610960f380e5340ce50252", - "aliases": [ - "CVE-2014-3513", - "VC-OPENSSL-20141015-CVE-2014-3513" + "unique_content_id": "e1e9269594db16c804a566e20f436cd2", + "summary": "A flaw in OBJ_obj2txt may cause pretty printing functions such as X509_name_oneline, X509_name_print_ex, to leak some information from the stack. Applications may be affected if they echo pretty printing output to the attacker. 
OpenSSL SSL/TLS clients and servers themselves are not affected.", + "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8zb", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0n", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1i", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3508", + "severities": [], + "reference_id": "CVE-2014-3508", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20140806.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + } ], + "date_published": "2014-08-06T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "3dbf91d5443471c2da6cf221eddf9898", "summary": "A flaw in the DTLS SRTP extension parsing code allows an attacker, who sends a carefully crafted handshake message, to cause OpenSSL to fail to free up to 64k of memory causing a memory leak. This could be exploited in a Denial Of Service attack. This issue affects OpenSSL 1.0.1 server implementations for both SSL/TLS and DTLS regardless of whether SRTP is used or configured. 
Implementations of OpenSSL that have been compiled with OPENSSL_NO_SRTP defined are not affected.", "affected_packages": [ { @@ -3932,11 +3590,7 @@ "weaknesses": [] }, { - "unique_content_id": "dcced98f8929707dec2045556ad27a5f407f0a6da5b0de6bb9cb0bf6c4eba16c", - "aliases": [ - "CVE-2014-3567", - "VC-OPENSSL-20141015-CVE-2014-3567" - ], + "unique_content_id": "63bf7bb20dcd1c7a3214c025ea53c1da", "summary": "When an OpenSSL SSL/TLS/DTLS server receives a session ticket the integrity of that ticket is first verified. In the event of a session ticket integrity check failing, OpenSSL will fail to free memory causing a memory leak. By sending a large number of invalid session tickets an attacker could exploit this issue in a Denial Of Service attack.", "affected_packages": [ { @@ -4000,11 +3654,52 @@ "weaknesses": [] }, { - "unique_content_id": "0441ee6483168f14e3eb89495aa9144a146935020e60b4fafdd5de9dc52fbb05", - "aliases": [ - "CVE-2014-3568", - "VC-OPENSSL-20141015-CVE-2014-3568" + "unique_content_id": "88aac050ad73754e929805f2ab5e64e7", + "summary": "OpenSSL has added support for TLS_FALLBACK_SCSV to allow applications to block the ability for a MITM attacker to force a protocol downgrade. Some client applications (such as browsers) will reconnect using a downgraded protocol to work around interoperability bugs in older servers. This could be exploited by an active man-in-the-middle to downgrade connections to SSL 3.0 even if both sides of the connection support higher protocols. SSL 3.0 contains a number of weaknesses including POODLE (CVE-2014-3566). 
See also https://tools.ietf.org/html/draft-ietf-tls-downgrade-scsv-00 and https://www.openssl.org/~bodo/ssl-poodle.pdf", + "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8zc", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0o", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1j", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i" + } ], + "references": [], + "date_published": "2014-10-15T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "ba13f3aea682e9e1c5fab3672da07088", "summary": "When OpenSSL is configured with \"no-ssl3\" as a build option, servers could accept and complete a SSL 3.0 handshake, and clients could be configured to send them.", "affected_packages": [ { @@ -4068,11 +3763,7 @@ "weaknesses": [] }, { - "unique_content_id": "753342c985991295f308ceffe0455636ac19375dc81d8e311fa5cf1d23473dd5", - "aliases": [ - "CVE-2014-3569", - "VC-OPENSSL-20141021-CVE-2014-3569" - ], + "unique_content_id": "d615f85fc740c95b6b98e150b56d1ae3", "summary": "When openssl is built with the no-ssl3 option and a SSL v3 ClientHello is received the ssl method would be set to NULL which could later result in a NULL pointer dereference.", "affected_packages": [ 
{ @@ -4136,12 +3827,8 @@ "weaknesses": [] }, { - "unique_content_id": "4247eafd0646ef018955aac7a30d2c023512a5b5f3a1803427473090a57766e5", - "aliases": [ - "CVE-2014-3570", - "VC-OPENSSL-20150108-CVE-2014-3570" - ], - "summary": "Bignum squaring (BN_sqr) may produce incorrect results on some platforms, including x86_64. This bug occurs at random with a very low probability, and is not known to be exploitable in any way, though its exact impact is difficult to determine. The following has been determined: *) The probability of BN_sqr producing an incorrect result at random is very low: 1/2^64 on the single affected 32-bit platform (MIPS) and 1/2^128 on affected 64-bit platforms. *) On most platforms, RSA follows a different code path and RSA operations are not affected at all. For the remaining platforms (e.g. OpenSSL built without assembly support), pre-existing countermeasures thwart bug attacks [1]. *) Static ECDH is theoretically affected: it is possible to construct elliptic curve points that would falsely appear to be on the given curve. However, there is no known computationally feasible way to construct such points with low order, and so the security of static ECDH private keys is believed to be unaffected. *) Other routines known to be theoretically affected are modular exponentiation, primality testing, DSA, RSA blinding, JPAKE and SRP. No exploits are known and straightforward bug attacks fail - either the attacker cannot control when the bug triggers, or no private key material is involved.", + "unique_content_id": "16d87492de289b2cbfd7ba3ef7e106fc", + "summary": "An OpenSSL client will accept a handshake using an ephemeral ECDH ciphersuite using an ECDSA certificate if the server key exchange message is omitted. 
This effectively removes forward secrecy from the ciphersuite.", "affected_packages": [ { "package": { @@ -4182,9 +3869,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3570", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3572", "severities": [], - "reference_id": "CVE-2014-3570", + "reference_id": "CVE-2014-3572", "reference_type": "" }, { @@ -4200,16 +3887,12 @@ "reference_type": "" } ], - "date_published": "2015-01-08T00:00:00+00:00", + "date_published": "2015-01-05T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "bf213d08073d8ca6d471398fe7b23b4ee5111732d9f7976e6ed6740944653e2d", - "aliases": [ - "CVE-2014-3571", - "VC-OPENSSL-20150105-CVE-2014-3571" - ], - "summary": "A carefully crafted DTLS message can cause a segmentation fault in OpenSSL due to a NULL pointer dereference. This could lead to a Denial Of Service attack.", + "unique_content_id": "3f5c428c988da21fcf75625d7764c31e", + "summary": "OpenSSL accepts several non-DER-variations of certificate signature algorithm and signature encodings. OpenSSL also does not enforce a match between the signature algorithm between the signed and unsigned portions of the certificate. By modifying the contents of the signature algorithm or the encoding of the signature, it is possible to change the certificate's fingerprint. This does not allow an attacker to forge certificates, and does not affect certificate verification or OpenSSL servers/clients in any other way. It also does not affect common revocation mechanisms. Only custom applications that rely on the uniqueness of the fingerprint (e.g. 
certificate blacklists) may be affected.", "affected_packages": [ { "package": { @@ -4250,16 +3933,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3571", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-8275", "severities": [], - "reference_id": "CVE-2014-3571", + "reference_id": "CVE-2014-8275", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150108.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -4272,12 +3955,8 @@ "weaknesses": [] }, { - "unique_content_id": "38ffe37c3e05fc10c74d621c2a23b78e2b3238c88a8cec376705a04e80131162", - "aliases": [ - "CVE-2014-3572", - "VC-OPENSSL-20150105-CVE-2014-3572" - ], - "summary": "An OpenSSL client will accept a handshake using an ephemeral ECDH ciphersuite using an ECDSA certificate if the server key exchange message is omitted. This effectively removes forward secrecy from the ciphersuite.", + "unique_content_id": "b80715d645997362b4be69a335b46cd5", + "summary": "A carefully crafted DTLS message can cause a segmentation fault in OpenSSL due to a NULL pointer dereference. This could lead to a Denial Of Service attack.", "affected_packages": [ { "package": { @@ -4318,16 +3997,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3572", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3571", "severities": [], - "reference_id": "CVE-2014-3572", + "reference_id": "CVE-2014-3571", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150108.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -4340,12 +4019,8 @@ "weaknesses": [] }, { - "unique_content_id": "173da4e79bb96a760519a18feb9667b22c727def897afd7cab56b2fc840ff141", - "aliases": [ - "CVE-2014-5139", - "VC-OPENSSL-20140806-CVE-2014-5139" - ], - "summary": "A crash was found affecting SRP ciphersuites used in a Server Hello message. 
The issue affects OpenSSL clients and allows a malicious server to crash the client with a null pointer dereference (read) by specifying an SRP ciphersuite even though it was not properly negotiated with the client. This could lead to a Denial of Service.", + "unique_content_id": "ecbce64df0cdd160db419c6db1cd9dc4", + "summary": "An OpenSSL client will accept the use of an RSA temporary key in a non-export RSA key exchange ciphersuite. A server could present a weak temporary key and downgrade the security of the session.", "affected_packages": [ { "package": { @@ -4356,35 +4031,9 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1i", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-5139", - "severities": [], - "reference_id": "CVE-2014-5139", - "reference_type": "" + "fixed_version": "0.9.8zd", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc" }, - { - "url": "https://www.openssl.org/news/secadv/20140806.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2014-08-06T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "c6cf5f33fdcc803e66a88537cf41831c6b88ce19c6c320843d5c59c63c148c83", - "aliases": [ - "CVE-2014-8176", - "VC-OPENSSL-20150611-CVE-2014-8176" - ], - "summary": "This vulnerability does not affect current versions of OpenSSL. It existed in previous OpenSSL versions and was fixed in June 2014. 
If a DTLS peer receives application data between the ChangeCipherSpec and Finished messages, buffering of such data may cause an invalid free, resulting in a segmentation fault or potentially, memory corruption.", - "affected_packages": [ { "package": { "name": "openssl", @@ -4394,8 +4043,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8za", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y" + "fixed_version": "1.0.0p", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o" }, { "package": { @@ -4406,34 +4055,22 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0m", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l" + "fixed_version": "1.0.1k", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0204", + "severities": [], + "reference_id": "CVE-2015-0204", + "reference_type": "" }, { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1h", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-8176", - "severities": [], - "reference_id": "CVE-2014-8176", - "reference_type": "" - }, - { - "url": "https://www.openssl.org/news/secadv/20150611.txt", + "url": "https://www.openssl.org/news/secadv/20150108.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ 
-4442,29 +4079,13 @@ "reference_type": "" } ], - "date_published": "2015-06-11T00:00:00+00:00", + "date_published": "2015-01-06T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "a7261d54aab29faf70f12bbfbdd3f3e78cf2beebfeb915dbd7a29714a8955fed", - "aliases": [ - "CVE-2014-8275", - "VC-OPENSSL-20150105-CVE-2014-8275" - ], - "summary": "OpenSSL accepts several non-DER-variations of certificate signature algorithm and signature encodings. OpenSSL also does not enforce a match between the signature algorithm between the signed and unsigned portions of the certificate. By modifying the contents of the signature algorithm or the encoding of the signature, it is possible to change the certificate's fingerprint. This does not allow an attacker to forge certificates, and does not affect certificate verification or OpenSSL servers/clients in any other way. It also does not affect common revocation mechanisms. Only custom applications that rely on the uniqueness of the fingerprint (e.g. certificate blacklists) may be affected.", + "unique_content_id": "14a72a501af8865388558895f94f4719", + "summary": "An OpenSSL server will accept a DH certificate for client authentication without the certificate verify message. This effectively allows a client to authenticate without the use of a private key. 
This only affects servers which trust a client certificate authority which issues certificates containing DH keys: these are extremely rare and hardly ever encountered.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8zd", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc" - }, { "package": { "name": "openssl", @@ -4492,9 +4113,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-8275", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0205", "severities": [], - "reference_id": "CVE-2014-8275", + "reference_id": "CVE-2015-0205", "reference_type": "" }, { @@ -4510,29 +4131,13 @@ "reference_type": "" } ], - "date_published": "2015-01-05T00:00:00+00:00", + "date_published": "2015-01-08T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "60f12268a60e39fd28c928e89af1f4038210aff7ad1f1fd748b8968ca65dfbdd", - "aliases": [ - "CVE-2015-0204", - "VC-OPENSSL-20150106-CVE-2015-0204" - ], - "summary": "An OpenSSL client will accept the use of an RSA temporary key in a non-export RSA key exchange ciphersuite. A server could present a weak temporary key and downgrade the security of the session.", + "unique_content_id": "ae55e9f4f7210581875a2de83cc058ec", + "summary": "A memory leak can occur in the dtls1_buffer_record function under certain conditions. In particular this could occur if an attacker sent repeated DTLS records with the same sequence number but for the next epoch. 
The memory leak could be exploited by an attacker in a Denial of Service attack through memory exhaustion.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8zd", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc" - }, { "package": { "name": "openssl", @@ -4560,16 +4165,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0204", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0206", "severities": [], - "reference_id": "CVE-2015-0204", + "reference_id": "CVE-2015-0206", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150108.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -4578,17 +4183,25 @@ "reference_type": "" } ], - "date_published": "2015-01-06T00:00:00+00:00", + "date_published": "2015-01-08T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "f5bac2344614e13386f702b70ba31694e5db10133151e5372c410e6fbff702ca", - "aliases": [ - "CVE-2015-0205", - "VC-OPENSSL-20150108-CVE-2015-0205" - ], - "summary": "An OpenSSL server will accept a DH certificate for client authentication without the certificate verify message. This effectively allows a client to authenticate without the use of a private key. This only affects servers which trust a client certificate authority which issues certificates containing DH keys: these are extremely rare and hardly ever encountered.", + "unique_content_id": "f2f9de1344eacac2f17f6642b9655651", + "summary": "Bignum squaring (BN_sqr) may produce incorrect results on some platforms, including x86_64. 
This bug occurs at random with a very low probability, and is not known to be exploitable in any way, though its exact impact is difficult to determine. The following has been determined: *) The probability of BN_sqr producing an incorrect result at random is very low: 1/2^64 on the single affected 32-bit platform (MIPS) and 1/2^128 on affected 64-bit platforms. *) On most platforms, RSA follows a different code path and RSA operations are not affected at all. For the remaining platforms (e.g. OpenSSL built without assembly support), pre-existing countermeasures thwart bug attacks [1]. *) Static ECDH is theoretically affected: it is possible to construct elliptic curve points that would falsely appear to be on the given curve. However, there is no known computationally feasible way to construct such points with low order, and so the security of static ECDH private keys is believed to be unaffected. *) Other routines known to be theoretically affected are modular exponentiation, primality testing, DSA, RSA blinding, JPAKE and SRP. 
No exploits are known and straightforward bug attacks fail - either the attacker cannot control when the bug triggers, or no private key material is involved.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8zd", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc" + }, { "package": { "name": "openssl", @@ -4616,9 +4229,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0205", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3570", "severities": [], - "reference_id": "CVE-2015-0205", + "reference_id": "CVE-2014-3570", "reference_type": "" }, { @@ -4638,12 +4251,8 @@ "weaknesses": [] }, { - "unique_content_id": "f94fa5bd638308939b95d4d520dd8e57678c3f4709d63229a75fe3868c15446d", - "aliases": [ - "CVE-2015-0206", - "VC-OPENSSL-20150108-CVE-2015-0206" - ], - "summary": "A memory leak can occur in the dtls1_buffer_record function under certain conditions. In particular this could occur if an attacker sent repeated DTLS records with the same sequence number but for the next epoch. The memory leak could be exploited by an attacker in a Denial of Service attack through memory exhaustion.", + "unique_content_id": "fde824bdb24f286066693f15a53a9c11", + "summary": "X509_to_X509_REQ NULL pointer deref. The function X509_to_X509_REQ will crash with a NULL pointer dereference if the certificate key is invalid. 
This function is rarely used in practice.", "affected_packages": [ { "package": { @@ -4654,8 +4263,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0p", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o" + "fixed_version": "0.9.8zf", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" }, { "package": { @@ -4666,41 +4275,21 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1k", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0206", - "severities": [], - "reference_id": "CVE-2015-0206", - "reference_type": "" + "fixed_version": "1.0.0r", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" }, { - "url": "https://www.openssl.org/news/secadv/20150108.txt", - "severities": [ - { - "value": "Moderate", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2015-01-08T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "45236a4d12fbe78a8b2d6a428b53a890bddcc1dedee31cb6d41b20af54e9bbb3", - "aliases": [ - "CVE-2015-0207", - "VC-OPENSSL-20150319-CVE-2015-0207" - ], - "summary": "Segmentation fault in DTLSv1_listen. A defect in the implementation of DTLSv1_listen means that state is preserved in the SSL object from one invocation to the next that can lead to a segmentation fault. Errors processing the initial ClientHello can trigger this scenario. 
An example of such an error could be that a DTLS1.0 only client is attempting to connect to a DTLS1.2 only server.", - "affected_packages": [ + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1m", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" + }, { "package": { "name": "openssl", @@ -4716,16 +4305,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0207", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0288", "severities": [], - "reference_id": "CVE-2015-0207", + "reference_id": "CVE-2015-0288", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150319.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -4734,16 +4323,12 @@ "reference_type": "" } ], - "date_published": "2015-03-19T00:00:00+00:00", + "date_published": "2015-03-02T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "774c0aaa394ae3ac59c32105e791fec7c71c602f342a042afd485ed819983fc6", - "aliases": [ - "CVE-2015-0208", - "VC-OPENSSL-20150319-CVE-2015-0208" - ], - "summary": "Segmentation fault for invalid PSS parameters. The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and invalid parameters. Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", + "unique_content_id": "92852e9f71e2d4220063d01c7e871d0f", + "summary": "Under certain conditions an OpenSSL 1.0.2 client can complete a handshake with an unseeded PRNG. 
If the handshake succeeds then the client random that has been used will have been generated from a PRNG with insufficient entropy and therefore the output may be predictable.", "affected_packages": [ { "package": { @@ -4760,16 +4345,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0208", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0285", "severities": [], - "reference_id": "CVE-2015-0208", + "reference_id": "CVE-2015-0285", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150319.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -4778,16 +4363,12 @@ "reference_type": "" } ], - "date_published": "2015-03-19T00:00:00+00:00", + "date_published": "2015-03-10T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "f53c9570c9efdac69f3e8300699223b0497562e4c9fb9398fdf2f29ba05efb53", - "aliases": [ - "CVE-2015-0209", - "VC-OPENSSL-20150319-CVE-2015-0209" - ], - "summary": "Use After Free following d2i_ECPrivatekey error. A malformed EC private key file consumed via the d2i_ECPrivateKey function could cause a use after free condition. This, in turn, could cause a double free in several private key parsing functions (such as d2i_PrivateKey or EVP_PKCS82PKEY) and could lead to a DoS attack or memory corruption for applications that receive EC private keys from untrusted sources. This scenario is considered rare.", + "unique_content_id": "037837042ea4921162841a8a572dedb7", + "summary": "A vulnerability existed in previous versions of OpenSSL related to the processing of base64 encoded data. Any code path that reads base64 data from an untrusted source could be affected (such as the PEM processing routines). 
Maliciously crafted base 64 data could trigger a segmenation fault or memory corruption.", "affected_packages": [ { "package": { @@ -4798,8 +4379,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8zf", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" + "fixed_version": "0.9.8za", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y" }, { "package": { @@ -4810,8 +4391,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0r", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" + "fixed_version": "1.0.0m", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l" }, { "package": { @@ -4822,9 +4403,37 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1m", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" + "fixed_version": "1.0.1h", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0292", + "severities": [], + "reference_id": "CVE-2015-0292", + "reference_type": "" }, + { + "url": "https://www.openssl.org/news/secadv/20150319.txt", + "severities": [ + { + "value": "Moderate", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2015-03-19T00:00:00+00:00", + "weaknesses": [] + }, + 
{ + "unique_content_id": "5d5cb3ddc2d7d372e96fc9e7eb0e6172", + "summary": "Segmentation fault for invalid PSS parameters. The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and invalid parameters. Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", + "affected_packages": [ { "package": { "name": "openssl", @@ -4840,16 +4449,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0209", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0208", "severities": [], - "reference_id": "CVE-2015-0209", + "reference_id": "CVE-2015-0208", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150319.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -4862,12 +4471,8 @@ "weaknesses": [] }, { - "unique_content_id": "ae10e26137b18ce0f074a9e88ad800799cfa131e4c9075c49e5cd736bd4ae7ef", - "aliases": [ - "CVE-2015-0285", - "VC-OPENSSL-20150310-CVE-2015-0285" - ], - "summary": "Under certain conditions an OpenSSL 1.0.2 client can complete a handshake with an unseeded PRNG. If the handshake succeeds then the client random that has been used will have been generated from a PRNG with insufficient entropy and therefore the output may be predictable.", + "unique_content_id": "66636a0c48ff0f39676cc43ff2fad975", + "summary": "Segmentation fault in DTLSv1_listen. A defect in the implementation of DTLSv1_listen means that state is preserved in the SSL object from one invocation to the next that can lead to a segmentation fault. Errors processing the initial ClientHello can trigger this scenario. 
An example of such an error could be that a DTLS1.0 only client is attempting to connect to a DTLS1.2 only server.", "affected_packages": [ { "package": { @@ -4884,16 +4489,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0285", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0207", "severities": [], - "reference_id": "CVE-2015-0285", + "reference_id": "CVE-2015-0207", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150319.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -4902,16 +4507,12 @@ "reference_type": "" } ], - "date_published": "2015-03-10T00:00:00+00:00", + "date_published": "2015-03-19T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "fcff8a052ccf49c48dbe7f8d5a88a485de1a213799585647c2124b98ae5ccb52", - "aliases": [ - "CVE-2015-0286", - "VC-OPENSSL-20150319-CVE-2015-0286" - ], - "summary": "Segmentation fault in ASN1_TYPE_cmp. The function ASN1_TYPE_cmp will crash with an invalid read if an attempt is made to compare ASN.1 boolean types. Since ASN1_TYPE_cmp is used to check certificate signature algorithm consistency this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", + "unique_content_id": "6b326dde327d1535193796cfd337f305", + "summary": "DoS via reachable assert in SSLv2 servers. 
A malicious client can trigger an OPENSSL_assert in servers that both support SSLv2 and enable export cipher suites by sending a specially crafted SSLv2 CLIENT-MASTER-KEY message.", "affected_packages": [ { "package": { @@ -4923,7 +4524,7 @@ "qualifiers": "" }, "fixed_version": "0.9.8zf", - "affected_version_range": "vers:openssl/0.9.8zd|0.9.8ze" + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" }, { "package": { @@ -4964,9 +4565,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0286", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0293", "severities": [], - "reference_id": "CVE-2015-0286", + "reference_id": "CVE-2015-0293", "reference_type": "" }, { @@ -4986,11 +4587,7 @@ "weaknesses": [] }, { - "unique_content_id": "3964ca62faf5fd2df7ebf079fc420e480621026d27cc10f9de31e2738a05936c", - "aliases": [ - "CVE-2015-0287", - "VC-OPENSSL-20150319-CVE-2015-0287" - ], + "unique_content_id": "7f14c539a7b1d7b62b178e81a164ca57", "summary": "ASN.1 structure reuse memory corruption. Reusing a structure in ASN.1 parsing may allow an attacker to cause memory corruption via an invalid write. Such reuse is and has been strongly discouraged and is believed to be rare.", "affected_packages": [ { @@ -5066,49 +4663,9 @@ "weaknesses": [] }, { - "unique_content_id": "751dcb76349de0d4bd85b5a27c52b97bf0f472fc1bd4b3a334c67afd762a0bf1", - "aliases": [ - "CVE-2015-0288", - "VC-OPENSSL-20150302-CVE-2015-0288" - ], - "summary": "X509_to_X509_REQ NULL pointer deref. The function X509_to_X509_REQ will crash with a NULL pointer dereference if the certificate key is invalid. This function is rarely used in practice.", + "unique_content_id": "87c491358b43983d41be3e34f577787f", + "summary": "Empty CKE with client auth and DHE. 
If client auth is used then a server can seg fault in the event of a DHE ciphersuite being selected and a zero length ClientKeyExchange message being sent by the client. This could be exploited in a DoS attack.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8zf", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0r", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1m", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" - }, { "package": { "name": "openssl", @@ -5124,16 +4681,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0288", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1787", "severities": [], - "reference_id": "CVE-2015-0288", + "reference_id": "CVE-2015-1787", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150319.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -5142,53 +4699,13 @@ "reference_type": "" } ], - "date_published": "2015-03-02T00:00:00+00:00", + "date_published": "2015-03-19T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": 
"66bbf7b524be1160d1805c966d32418f2dd42b204296d6b885939dafb1ce52f5", - "aliases": [ - "CVE-2015-0289", - "VC-OPENSSL-20150319-CVE-2015-0289" - ], - "summary": "PKCS#7 NULL pointer dereference. The PKCS#7 parsing code does not handle missing outer ContentInfo correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. Applications that verify PKCS#7 signatures, decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. OpenSSL clients and servers are not affected.", + "unique_content_id": "8c8ab1d205efac4fa9eeb6888a73d02b", + "summary": "Multiblock corrupted pointer. OpenSSL 1.0.2 introduced the \"multiblock\" performance improvement. This feature only applies on 64 bit x86 architecture platforms that support AES NI instructions. A defect in the implementation of \"multiblock\" can cause OpenSSL's internal write buffer to become incorrectly set to NULL when using non-blocking IO. Typically, when the user application is using a socket BIO for writing, this will only result in a failed connection. 
However if some other BIO is used then it is likely that a segmentation fault will be triggered, thus enabling a potential DoS attack.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8zf", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0r", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1m", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" - }, { "package": { "name": "openssl", @@ -5204,9 +4721,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0289", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0290", "severities": [], - "reference_id": "CVE-2015-0289", + "reference_id": "CVE-2015-0290", "reference_type": "" }, { @@ -5226,12 +4743,8 @@ "weaknesses": [] }, { - "unique_content_id": "23b9fdf2b9a73946210388721d4df0de3a020ac58b1e6669c3696b33a602ec98", - "aliases": [ - "CVE-2015-0290", - "VC-OPENSSL-20150319-CVE-2015-0290" - ], - "summary": "Multiblock corrupted pointer. OpenSSL 1.0.2 introduced the \"multiblock\" performance improvement. This feature only applies on 64 bit x86 architecture platforms that support AES NI instructions. 
A defect in the implementation of \"multiblock\" can cause OpenSSL's internal write buffer to become incorrectly set to NULL when using non-blocking IO. Typically, when the user application is using a socket BIO for writing, this will only result in a failed connection. However if some other BIO is used then it is likely that a segmentation fault will be triggered, thus enabling a potential DoS attack.", + "unique_content_id": "9c790e8e82381b71bd62ae5a2403aa43", + "summary": "ClientHello sigalgs DoS. If a client connects to an OpenSSL 1.0.2 server and renegotiates with an invalid signature algorithms extension a NULL pointer dereference will occur. This can be exploited in a DoS attack against the server.", "affected_packages": [ { "package": { @@ -5248,16 +4761,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0290", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0291", "severities": [], - "reference_id": "CVE-2015-0290", + "reference_id": "CVE-2015-0291", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150319.txt", "severities": [ { - "value": "Moderate", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -5270,13 +4783,45 @@ "weaknesses": [] }, { - "unique_content_id": "6bdb68d814ff5f69711b93446eb25ddf133d6fbb35bab358bb97b3c423bb5811", - "aliases": [ - "CVE-2015-0291", - "VC-OPENSSL-20150319-CVE-2015-0291" - ], - "summary": "ClientHello sigalgs DoS. If a client connects to an OpenSSL 1.0.2 server and renegotiates with an invalid signature algorithms extension a NULL pointer dereference will occur. This can be exploited in a DoS attack against the server.", + "unique_content_id": "a6996bfe711e793b22ceb3d47c975099", + "summary": "PKCS#7 NULL pointer dereference. The PKCS#7 parsing code does not handle missing outer ContentInfo correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. 
Applications that verify PKCS#7 signatures, decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. OpenSSL clients and servers are not affected.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8zf", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0r", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1m", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" + }, { "package": { "name": "openssl", @@ -5292,16 +4837,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0291", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0289", "severities": [], - "reference_id": "CVE-2015-0291", + "reference_id": "CVE-2015-0289", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150319.txt", "severities": [ { - "value": "High", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -5314,12 +4859,8 @@ "weaknesses": [] }, { - "unique_content_id": "9928809c0f0a04e7ae6a89ccefdce3eb83e34e047f3470f7778b42182c3b0a3e", - "aliases": [ - "CVE-2015-0292", - "VC-OPENSSL-20150319-CVE-2015-0292" - ], - "summary": "A 
vulnerability existed in previous versions of OpenSSL related to the processing of base64 encoded data. Any code path that reads base64 data from an untrusted source could be affected (such as the PEM processing routines). Maliciously crafted base 64 data could trigger a segmenation fault or memory corruption.", + "unique_content_id": "b91a75f67326a148c90e6ad45ba11839", + "summary": "Use After Free following d2i_ECPrivatekey error. A malformed EC private key file consumed via the d2i_ECPrivateKey function could cause a use after free condition. This, in turn, could cause a double free in several private key parsing functions (such as d2i_PrivateKey or EVP_PKCS82PKEY) and could lead to a DoS attack or memory corruption for applications that receive EC private keys from untrusted sources. This scenario is considered rare.", "affected_packages": [ { "package": { @@ -5330,8 +4871,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8za", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y" + "fixed_version": "0.9.8zf", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" }, { "package": { @@ -5342,8 +4883,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0m", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l" + "fixed_version": "1.0.0r", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" }, { "package": { @@ -5354,22 +4895,34 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": 
"1.0.1h", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g" + "fixed_version": "1.0.1m", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2a", + "affected_version_range": "vers:openssl/1.0.2" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0292", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0209", "severities": [], - "reference_id": "CVE-2015-0292", + "reference_id": "CVE-2015-0209", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150319.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -5382,12 +4935,8 @@ "weaknesses": [] }, { - "unique_content_id": "d86068f891546989943214dbe20bceca4d29250299395048df1666ebef7ede03", - "aliases": [ - "CVE-2015-0293", - "VC-OPENSSL-20150319-CVE-2015-0293" - ], - "summary": "DoS via reachable assert in SSLv2 servers. A malicious client can trigger an OPENSSL_assert in servers that both support SSLv2 and enable export cipher suites by sending a specially crafted SSLv2 CLIENT-MASTER-KEY message.", + "unique_content_id": "d0946aba30cf839fdbc468685b6bd683", + "summary": "Segmentation fault in ASN1_TYPE_cmp. The function ASN1_TYPE_cmp will crash with an invalid read if an attempt is made to compare ASN.1 boolean types. Since ASN1_TYPE_cmp is used to check certificate signature algorithm consistency this can be used to crash any certificate verification operation and exploited in a DoS attack. 
Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", "affected_packages": [ { "package": { @@ -5399,7 +4948,7 @@ "qualifiers": "" }, "fixed_version": "0.9.8zf", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" + "affected_version_range": "vers:openssl/0.9.8zd|0.9.8ze" }, { "package": { @@ -5440,9 +4989,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0293", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0286", "severities": [], - "reference_id": "CVE-2015-0293", + "reference_id": "CVE-2015-0286", "reference_type": "" }, { @@ -5462,12 +5011,8 @@ "weaknesses": [] }, { - "unique_content_id": "e2e31fceb4d827820c9a6c2c0144827a16d464ad33bd6139cf5e5c7389864c4c", - "aliases": [ - "CVE-2015-1787", - "VC-OPENSSL-20150319-CVE-2015-1787" - ], - "summary": "Empty CKE with client auth and DHE. If client auth is used then a server can seg fault in the event of a DHE ciphersuite being selected and a zero length ClientKeyExchange message being sent by the client. 
This could be exploited in a DoS attack.", + "unique_content_id": "c9cffc6fc71a28da39de00bca06f0ce3", + "summary": "If a NewSessionTicket is received by a multi-threaded client when attempting to reuse a previous ticket then a race condition can occur potentially leading to a double free of the ticket data.", "affected_packages": [ { "package": { @@ -5478,22 +5023,58 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2a", - "affected_version_range": "vers:openssl/1.0.2" + "fixed_version": "0.9.8zg", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze|0.9.8zf" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0s", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1n", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2b", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1787", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1791", "severities": [], - "reference_id": "CVE-2015-1787", + "reference_id": "CVE-2015-1791", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20150319.txt", + "url": 
"https://www.openssl.org/news/secadv/20150611.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -5502,16 +5083,12 @@ "reference_type": "" } ], - "date_published": "2015-03-19T00:00:00+00:00", + "date_published": "2015-06-02T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "be2ba7ab66a7f53457702397f237a1894566b9d27e7d776969c121a98b0b48c3", - "aliases": [ - "CVE-2015-1788", - "VC-OPENSSL-20150611-CVE-2015-1788" - ], - "summary": "When processing an ECParameters structure OpenSSL enters an infinite loop if the curve specified is over a specially malformed binary polynomial field. This can be used to perform denial of service against any system which processes public keys, certificate requests or certificates. This includes TLS clients and TLS servers with client authentication enabled.", + "unique_content_id": "154f6f04f63ee6fba925180ed9e059c1", + "summary": "X509_cmp_time does not properly check the length of the ASN1_TIME string and can read a few bytes out of bounds. In addition, X509_cmp_time accepts an arbitrary number of fractional seconds in the time string. An attacker can use this to craft malformed certificates and CRLs of various sizes and potentially cause a segmentation fault, resulting in a DoS on applications that verify certificates or CRLs. TLS clients that verify CRLs are affected. 
TLS clients and servers with client authentication enabled may be affected if they use custom verification callbacks.", "affected_packages": [ { "package": { @@ -5522,8 +5099,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8s", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" + "fixed_version": "0.9.8zg", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze|0.9.8zf" }, { "package": { @@ -5534,8 +5111,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0e", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d" + "fixed_version": "1.0.0s", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r" }, { "package": { @@ -5564,9 +5141,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1788", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1789", "severities": [], - "reference_id": "CVE-2015-1788", + "reference_id": "CVE-2015-1789", "reference_type": "" }, { @@ -5586,12 +5163,8 @@ "weaknesses": [] }, { - "unique_content_id": "6693aa99959f40abe75da63ee98844b32d6c80ee49cd880d7211f82f39bff9bf", - "aliases": [ - "CVE-2015-1789", - "VC-OPENSSL-20150611-CVE-2015-1789" - ], - "summary": "X509_cmp_time does not properly check the length of the ASN1_TIME string and can read a few bytes out of bounds. In addition, X509_cmp_time accepts an arbitrary number of fractional seconds in the time string. 
An attacker can use this to craft malformed certificates and CRLs of various sizes and potentially cause a segmentation fault, resulting in a DoS on applications that verify certificates or CRLs. TLS clients that verify CRLs are affected. TLS clients and servers with client authentication enabled may be affected if they use custom verification callbacks.", + "unique_content_id": "1d42619f9d572e6c6f831da1d4b5347c", + "summary": "This vulnerability does not affect current versions of OpenSSL. It existed in previous OpenSSL versions and was fixed in June 2014. If a DTLS peer receives application data between the ChangeCipherSpec and Finished messages, buffering of such data may cause an invalid free, resulting in a segmentation fault or potentially, memory corruption.", "affected_packages": [ { "package": { @@ -5602,20 +5175,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8zg", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze|0.9.8zf" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0s", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r" + "fixed_version": "0.9.8za", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y" }, { "package": { @@ -5626,8 +5187,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1n", - "affected_version_range": 
"vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m" + "fixed_version": "1.0.0m", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l" }, { "package": { @@ -5638,15 +5199,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2b", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a" + "fixed_version": "1.0.1h", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1789", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-8176", "severities": [], - "reference_id": "CVE-2015-1789", + "reference_id": "CVE-2014-8176", "reference_type": "" }, { @@ -5666,11 +5227,7 @@ "weaknesses": [] }, { - "unique_content_id": "0d6ca333ae5301c543aa3d5fee659526e6e7df19d6cd23503b080d44f393be29", - "aliases": [ - "CVE-2015-1790", - "VC-OPENSSL-20150611-CVE-2015-1790" - ], + "unique_content_id": "2b988a60b7d38da17ad12c1d84455a70", "summary": "The PKCS#7 parsing code does not handle missing inner EncryptedContent correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. Applications that decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. 
OpenSSL clients and servers are not affected.", "affected_packages": [ { @@ -5746,12 +5303,8 @@ "weaknesses": [] }, { - "unique_content_id": "2ad006bcecf434794b6cafb90c3e60eda8f3465baf0d60adf2eb0547f6075427", - "aliases": [ - "CVE-2015-1791", - "VC-OPENSSL-20150602-CVE-2015-1791" - ], - "summary": "If a NewSessionTicket is received by a multi-threaded client when attempting to reuse a previous ticket then a race condition can occur potentially leading to a double free of the ticket data.", + "unique_content_id": "303206c390cb78e168c8425d3c6d2c91", + "summary": "When verifying a signedData message the CMS code can enter an infinite loop if presented with an unknown hash function OID. This can be used to perform denial of service against any system which verifies signedData messages using the CMS code.", "affected_packages": [ { "package": { @@ -5804,16 +5357,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1791", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1792", "severities": [], - "reference_id": "CVE-2015-1791", + "reference_id": "CVE-2015-1792", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150611.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -5822,16 +5375,12 @@ "reference_type": "" } ], - "date_published": "2015-06-02T00:00:00+00:00", + "date_published": "2015-06-11T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "fbbe723124334c66dbc53652a1a157264900e602a74ed731a2223c212d189f15", - "aliases": [ - "CVE-2015-1792", - "VC-OPENSSL-20150611-CVE-2015-1792" - ], - "summary": "When verifying a signedData message the CMS code can enter an infinite loop if presented with an unknown hash function OID. 
This can be used to perform denial of service against any system which verifies signedData messages using the CMS code.", + "unique_content_id": "742341fd7596524c221d7ac8aa8025de", + "summary": "When processing an ECParameters structure OpenSSL enters an infinite loop if the curve specified is over a specially malformed binary polynomial field. This can be used to perform denial of service against any system which processes public keys, certificate requests or certificates. This includes TLS clients and TLS servers with client authentication enabled.", "affected_packages": [ { "package": { @@ -5842,8 +5391,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8zg", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze|0.9.8zf" + "fixed_version": "0.9.8s", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" }, { "package": { @@ -5854,8 +5403,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0s", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r" + "fixed_version": "1.0.0e", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d" }, { "package": { @@ -5884,9 +5433,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1792", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1788", "severities": [], - "reference_id": "CVE-2015-1792", + "reference_id": "CVE-2015-1788", "reference_type": "" }, { @@ -5906,11 +5455,7 @@ "weaknesses": [] }, { - "unique_content_id": "77f031f81329fda29782191d97de2003d3b4fadda5cae0ddf20bcd4ba0958c6e", - "aliases": [ - 
"CVE-2015-1793", - "VC-OPENSSL-20150709-CVE-2015-1793" - ], + "unique_content_id": "78795bf94381c0a1772ed444fb576c91", "summary": "An error in the implementation of the alternative certificate chain logic could allow an attacker to cause certain checks on untrusted certificates to be bypassed, such as the CA flag, enabling them to use a valid leaf certificate to act as a CA and \"issue\" an invalid certificate.", "affected_packages": [ { @@ -5962,11 +5507,7 @@ "weaknesses": [] }, { - "unique_content_id": "6b2da461b684884127216718edee478e331e8b64439b5c98f36f9284ead68922", - "aliases": [ - "CVE-2015-1794", - "VC-OPENSSL-20150811-CVE-2015-1794" - ], + "unique_content_id": "34e7fc0f12a532fb0e3f133767651b82", "summary": "If a client receives a ServerKeyExchange for an anonymous DH ciphersuite with the value of p set to 0 then a seg fault can occur leading to a possible denial of service attack.", "affected_packages": [ { @@ -6006,13 +5547,21 @@ "weaknesses": [] }, { - "unique_content_id": "7c4c38c81c872cfcb7ae77bc45b1a78760ddda5aa1ebf6e061d41443c7a0870a", - "aliases": [ - "CVE-2015-3193", - "VC-OPENSSL-20151203-CVE-2015-3193" - ], - "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. 
For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites.", + "unique_content_id": "3c8cc92c8be75ecbbf22aa5caa33bfa9", + "summary": "The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and absent mask generation function parameter. Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1q", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p" + }, { "package": { "name": "openssl", @@ -6028,9 +5577,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3193", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3194", "severities": [], - "reference_id": "CVE-2015-3193", + "reference_id": "CVE-2015-3194", "reference_type": "" }, { @@ -6050,12 +5599,8 @@ "weaknesses": [] }, { - "unique_content_id": "826e677a591d0d5e808454bc70f127fab4629f8ee5c2f16bc03c03740fc52661", - "aliases": [ - "CVE-2015-3194", - "VC-OPENSSL-20151203-CVE-2015-3194" - ], - "summary": "The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and absent mask generation function parameter. Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. 
Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", + "unique_content_id": "58623263c1b67d72553e0282afd5d03a", + "summary": "If PSK identity hints are received by a multi-threaded client then the values are wrongly updated in the parent SSL_CTX structure. This can result in a race condition potentially leading to a double free of the identify hint data.", "affected_packages": [ { "package": { @@ -6066,8 +5611,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1q", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p" + "fixed_version": "1.0.0t", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0h|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r|1.0.0s" }, { "package": { @@ -6078,22 +5623,34 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2e", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d" + "fixed_version": "1.0.1p", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2d", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3194", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3196", "severities": [], - "reference_id": "CVE-2015-3194", + "reference_id": "CVE-2015-3196", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20151203.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -6106,11 
+5663,7 @@ "weaknesses": [] }, { - "unique_content_id": "7eedb5c223cb23e47aa9ce69cf53869fc975e43e734731737790f5355c57c46f", - "aliases": [ - "CVE-2015-3195", - "VC-OPENSSL-20151203-CVE-2015-3195" - ], + "unique_content_id": "aa54b531fb7b90075a099e3d74098089", "summary": "When presented with a malformed X509_ATTRIBUTE structure OpenSSL will leak memory. This structure is used by the PKCS#7 and CMS routines so any application which reads PKCS#7 or CMS data from untrusted sources is affected. SSL/TLS is not affected.", "affected_packages": [ { @@ -6186,12 +5739,8 @@ "weaknesses": [] }, { - "unique_content_id": "434c0477ef7b438f9b58ddb4cf5d072f24523f7231cf77fb3d492dc0ae358d03", - "aliases": [ - "CVE-2015-3196", - "VC-OPENSSL-20151203-CVE-2015-3196" - ], - "summary": "If PSK identity hints are received by a multi-threaded client then the values are wrongly updated in the parent SSL_CTX structure. This can result in a race condition potentially leading to a double free of the identify hint data.", + "unique_content_id": "c49999301ee8aa01a9ddd428979f0bc4", + "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. 
For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites.", "affected_packages": [ { "package": { @@ -6202,46 +5751,22 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0t", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0h|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r|1.0.0s" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1p", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.2d", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c" + "fixed_version": "1.0.2e", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3196", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3193", "severities": [], - "reference_id": "CVE-2015-3196", + "reference_id": "CVE-2015-3193", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20151203.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -6254,11 +5779,7 @@ "weaknesses": [] }, { - "unique_content_id": "582bb190d8800ea86907f44769c22a29e8f34079c0b2ce5e09052db99707480b", - "aliases": [ - "CVE-2015-3197", - "VC-OPENSSL-20160128-CVE-2015-3197" - ], + "unique_content_id": "a1b7aec7c53c8018f9f0fc9118de71b4", "summary": "A malicious client can negotiate SSLv2 ciphers that have been disabled on the server and complete SSLv2 handshakes even if all SSLv2 ciphers have been disabled, provided that the SSLv2 protocol was not also disabled via 
SSL_OP_NO_SSLv2.", "affected_packages": [ { @@ -6310,11 +5831,7 @@ "weaknesses": [] }, { - "unique_content_id": "202a2aec8d017aab9c615cfdaf94d9a7137c18c8e41c2d999025759310199b81", - "aliases": [ - "CVE-2016-0701", - "VC-OPENSSL-20160128-CVE-2016-0701" - ], + "unique_content_id": "bb0ba32b691bb5c4273824bad2f457a9", "summary": "Historically OpenSSL usually only ever generated DH parameters based on \"safe\" primes. More recently (in version 1.0.2) support was provided for generating X9.42 style parameter files such as those required for RFC 5114 support. The primes used in such files may not be \"safe\". Where an application is using DH configured with parameters based on primes that are not \"safe\" then an attacker could use this fact to find a peer's private DH exponent. This attack requires that the attacker complete multiple handshakes in which the peer uses the same private DH exponent. For example this could be used to discover a TLS server's private DH exponent if it's reusing the private DH exponent or it's using a static DH ciphersuite. OpenSSL provides the option SSL_OP_SINGLE_DH_USE for ephemeral DH (DHE) in TLS. It is not on by default. If the option is not set then the server reuses the same private DH exponent for the life of the server process and would be vulnerable to this attack. It is believed that many popular applications do set this option and would therefore not be at risk. OpenSSL before 1.0.2f will reuse the key if: - SSL_CTX_set_tmp_dh()/SSL_set_tmp_dh() is used and SSL_OP_SINGLE_DH_USE is not set. - SSL_CTX_set_tmp_dh_callback()/SSL_set_tmp_dh_callback() is used, and both the parameters and the key are set and SSL_OP_SINGLE_DH_USE is not used. This is an undocumted feature and parameter files don't contain the key. - Static DH ciphersuites are used. The key is part of the certificate and so it will always reuse it. This is only supported in 1.0.2. 
It will not reuse the key for DHE ciphers suites if: - SSL_OP_SINGLE_DH_USE is set - SSL_CTX_set_tmp_dh_callback()/SSL_set_tmp_dh_callback() is used and the callback does not provide the key, only the parameters. The callback is almost always used like this. Non-safe primes are generated by OpenSSL when using: - genpkey with the dh_rfc5114 option. This will write an X9.42 style file including the prime-order subgroup size \"q\". This is supported since the 1.0.2 version. Older versions can't read files generated in this way. - dhparam with the -dsaparam option. This has always been documented as requiring the single use. The fix for this issue adds an additional check where a \"q\" parameter is available (as is the case in X9.42 based parameters). This detects the only known attack, and is the only possible defense for static DH ciphersuites. This could have some performance impact. Additionally the SSL_OP_SINGLE_DH_USE option has been switched on by default and cannot be disabled. This could have some performance impact.", "affected_packages": [ { @@ -6354,12 +5871,8 @@ "weaknesses": [] }, { - "unique_content_id": "c2f87f5ea625ae3e87ab3a3ec82e47995b16601835ef7be500414932928f3c69", - "aliases": [ - "CVE-2016-0702", - "VC-OPENSSL-20160301-CVE-2016-0702" - ], - "summary": "A side-channel attack was found which makes use of cache-bank conflicts on the Intel Sandy-Bridge microarchitecture which could lead to the recovery of RSA keys. The ability to exploit this issue is limited as it relies on an attacker who has control of code in a thread running on the same hyper-threaded core as the victim thread which is performing decryptions.", + "unique_content_id": "1e32ac05e706f05b60d0c367814faf5b", + "summary": "A double free bug was discovered when OpenSSL parses malformed DSA private keys and could lead to a DoS attack or memory corruption for applications that receive DSA private keys from untrusted sources. 
This scenario is considered rare.", "affected_packages": [ { "package": { @@ -6388,96 +5901,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0702", - "severities": [], - "reference_id": "CVE-2016-0702", - "reference_type": "" - }, - { - "url": "https://www.openssl.org/news/secadv/20160301.txt", - "severities": [ - { - "value": "Low", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2016-03-01T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "0dc285b8adde395581c94e422ef09ae80752d8b5b7e8177bee2bd05a9044f07c", - "aliases": [ - "CVE-2016-0703", - "VC-OPENSSL-20160301-CVE-2016-0703" - ], - "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address vulnerability CVE-2015-0293. s2_srvr.c did not enforce that clear-key-length is 0 for non-export ciphers. If clear-key bytes are present for these ciphers, they *displace* encrypted-key bytes. This leads to an efficient divide-and-conquer key recovery attack: if an eavesdropper has intercepted an SSLv2 handshake, they can use the server as an oracle to determine the SSLv2 master-key, using only 16 connections to the server and negligible computation. 
More importantly, this leads to a more efficient version of DROWN that is effective against non-export ciphersuites, and requires no significant computation.", - "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8zf", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0r", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1m", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.2a", - "affected_version_range": "vers:openssl/1.0.2" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0703", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0705", "severities": [], - "reference_id": "CVE-2016-0703", + "reference_id": "CVE-2016-0705", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160301.txt", "severities": [ { - "value": "High", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -6490,12 +5923,8 @@ "weaknesses": [] }, { - "unique_content_id": 
"e7bba3f95fb4b39e7b5f6a6297935e8cfcadbbabda552ee1b06e65e9282ab672", - "aliases": [ - "CVE-2016-0704", - "VC-OPENSSL-20160301-CVE-2016-0704" - ], - "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address the vulnerability CVE-2015-0293. s2_srvr.c overwrite the wrong bytes in the master-key when applying Bleichenbacher protection for export cipher suites. This provides a Bleichenbacher oracle, and could potentially allow more efficient variants of the DROWN attack.", + "unique_content_id": "356419ba58928dd92651de3bd8726759", + "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address vulnerability CVE-2015-0293. s2_srvr.c did not enforce that clear-key-length is 0 for non-export ciphers. If clear-key bytes are present for these ciphers, they *displace* encrypted-key bytes. This leads to an efficient divide-and-conquer key recovery attack: if an eavesdropper has intercepted an SSLv2 handshake, they can use the server as an oracle to determine the SSLv2 master-key, using only 16 connections to the server and negligible computation. 
More importantly, this leads to a more efficient version of DROWN that is effective against non-export ciphersuites, and requires no significant computation.", "affected_packages": [ { "package": { @@ -6548,16 +5977,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0704", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0703", "severities": [], - "reference_id": "CVE-2016-0704", + "reference_id": "CVE-2016-0703", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160301.txt", "severities": [ { - "value": "Moderate", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -6570,12 +5999,8 @@ "weaknesses": [] }, { - "unique_content_id": "8f5d81b6201854025eba1228dc3dbb1562bdefdd101afb131581d6c49722d872", - "aliases": [ - "CVE-2016-0705", - "VC-OPENSSL-20160301-CVE-2016-0705" - ], - "summary": "A double free bug was discovered when OpenSSL parses malformed DSA private keys and could lead to a DoS attack or memory corruption for applications that receive DSA private keys from untrusted sources. This scenario is considered rare.", + "unique_content_id": "4f983dc0849c0739895c99ff8042ef0f", + "summary": "A cross-protocol attack was discovered that could lead to decryption of TLS sessions by using a server supporting SSLv2 and EXPORT cipher suites as a Bleichenbacher RSA padding oracle. Note that traffic between clients and non-vulnerable servers can be decrypted provided another server supporting SSLv2 and EXPORT ciphers (even with a different protocol such as SMTP, IMAP or POP) shares the RSA keys of the non-vulnerable server. This vulnerability is known as DROWN (CVE-2016-0800). Recovering one session key requires the attacker to perform approximately 2^50 computation, as well as thousands of connections to the affected server. 
A more efficient variant of the DROWN attack exists against unpatched OpenSSL servers using versions that predate 1.0.2a, 1.0.1m, 1.0.0r and 0.9.8zf released on 19/Mar/2015 (see CVE-2016-0703 below). Users can avoid this issue by disabling the SSLv2 protocol in all their SSL/TLS servers, if they've not done so already. Disabling all SSLv2 ciphers is also sufficient, provided the patches for CVE-2015-3197 (fixed in OpenSSL 1.0.1r and 1.0.2f) have been deployed. Servers that have not disabled the SSLv2 protocol, and are not patched for CVE-2015-3197 are vulnerable to DROWN even if all SSLv2 ciphers are nominally disabled, because malicious clients can force the use of SSLv2 with EXPORT ciphers. OpenSSL 1.0.2g and 1.0.1s deploy the following mitigation against DROWN: SSLv2 is now by default disabled at build-time. Builds that are not configured with \"enable-ssl2\" will not support SSLv2. Even if \"enable-ssl2\" is used, users who want to negotiate SSLv2 via the version-flexible SSLv23_method() will need to explicitly call either of: SSL_CTX_clear_options(ctx, SSL_OP_NO_SSLv2); or SSL_clear_options(ssl, SSL_OP_NO_SSLv2); as appropriate. Even if either of those is used, or the application explicitly uses the version-specific SSLv2_method() or its client or server variants, SSLv2 ciphers vulnerable to exhaustive search key recovery have been removed. Specifically, the SSLv2 40-bit EXPORT ciphers, and SSLv2 56-bit DES are no longer available. In addition, weak ciphers in SSLv3 and up are now disabled in default builds of OpenSSL. 
Builds that are not configured with \"enable-weak-ssl-ciphers\" will not provide any \"EXPORT\" or \"LOW\" strength ciphers.", "affected_packages": [ { "package": { @@ -6604,16 +6029,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0705", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0800", "severities": [], - "reference_id": "CVE-2016-0705", + "reference_id": "CVE-2016-0800", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160301.txt", "severities": [ { - "value": "Low", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -6626,12 +6051,8 @@ "weaknesses": [] }, { - "unique_content_id": "54d1b0ccbb4b663c9a43e3d2a6be131b6b5a0413fbb5f22cee822ed6936d94fe", - "aliases": [ - "CVE-2016-0797", - "VC-OPENSSL-20160301-CVE-2016-0797" - ], - "summary": "In the BN_hex2bn function the number of hex digits is calculated using an int value |i|. Later |bn_expand| is called with a value of |i * 4|. For large values of |i| this can result in |bn_expand| not allocating any memory because |i * 4| is negative. This can leave the internal BIGNUM data field as NULL leading to a subsequent NULL ptr deref. For very large values of |i|, the calculation |i * 4| could be a positive value smaller than |i|. In this case memory is allocated to the internal BIGNUM data field, but it is insufficiently sized leading to heap corruption. A similar issue exists in BN_dec2bn. This could have security consequences if BN_hex2bn/BN_dec2bn is ever called by user applications with very large untrusted hex/dec data. This is anticipated to be a rare occurrence. All OpenSSL internal usage of these functions use data that is not expected to be untrusted, e.g. config file data or application command line arguments. If user developed applications generate config file data based on untrusted data then it is possible that this could also lead to security consequences. 
This is also anticipated to be rare.", + "unique_content_id": "5115d9fca6da89c0f09b18c66063043e", + "summary": "A side-channel attack was found which makes use of cache-bank conflicts on the Intel Sandy-Bridge microarchitecture which could lead to the recovery of RSA keys. The ability to exploit this issue is limited as it relies on an attacker who has control of code in a thread running on the same hyper-threaded core as the victim thread which is performing decryptions.", "affected_packages": [ { "package": { @@ -6660,9 +6081,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0797", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0702", "severities": [], - "reference_id": "CVE-2016-0797", + "reference_id": "CVE-2016-0702", "reference_type": "" }, { @@ -6682,11 +6103,7 @@ "weaknesses": [] }, { - "unique_content_id": "5cee408201ad50518a04c7597ae547e01069a3e3a71411bb5d03665d395c9c3f", - "aliases": [ - "CVE-2016-0798", - "VC-OPENSSL-20160301-CVE-2016-0798" - ], + "unique_content_id": "56718964514021ad2571d5e9bb4e1ba9", "summary": "The SRP user database lookup method SRP_VBASE_get_by_user had confusing memory management semantics; the returned pointer was sometimes newly allocated, and sometimes owned by the callee. The calling code has no way of distinguishing these two cases. Specifically, SRP servers that configure a secret seed to hide valid login information are vulnerable to a memory leak: an attacker connecting with an invalid username can cause a memory leak of around 300 bytes per connection. Servers that do not configure SRP, or configure SRP but do not configure a seed are not vulnerable. In Apache, the seed directive is known as SSLSRPUnknownUserSeed. To mitigate the memory leak, the seed handling in SRP_VBASE_get_by_user is now disabled even if the user has configured a seed. Applications are advised to migrate to SRP_VBASE_get1_by_user. 
However, note that OpenSSL makes no strong guarantees about the indistinguishability of valid and invalid logins. In particular, computations are currently not carried out in constant time.", "affected_packages": [ { @@ -6738,11 +6155,7 @@ "weaknesses": [] }, { - "unique_content_id": "84fcbdaee2028d10d0a154f4562e0212135d6cce3bfd9eda6b933c8e302f6351", - "aliases": [ - "CVE-2016-0799", - "VC-OPENSSL-20160301-CVE-2016-0799" - ], + "unique_content_id": "65ffc54cdd6e37ee324ff207835500d6", "summary": "The internal |fmtstr| function used in processing a \"%s\" format string in the BIO_*printf functions could overflow while calculating the length of a string and cause an OOB read when printing very long strings. Additionally the internal |doapr_outch| function can attempt to write to an OOB memory location (at an offset from the NULL pointer) in the event of a memory allocation failure. In 1.0.2 and below this could be caused where the size of a buffer to be allocated is greater than INT_MAX. E.g. this could be in processing a very long \"%s\" format string. Memory leaks can also occur. The first issue may mask the second issue dependent on compiler behaviour. These problems could enable attacks where large amounts of untrusted data is passed to the BIO_*printf functions. If applications use these functions in this way then they could be vulnerable. OpenSSL itself uses these functions when printing out human-readable dumps of ASN.1 data. Therefore applications that print this data could be vulnerable if the data is from untrusted sources. OpenSSL command line applications could also be vulnerable where they print out ASN.1 data, or if untrusted data is passed as command line arguments. Libssl is not considered directly vulnerable. 
Additionally certificates etc received via remote connections via libssl are also unlikely to be able to trigger these issues because of message size limits enforced within libssl.", "affected_packages": [ { @@ -6794,12 +6207,84 @@ "weaknesses": [] }, { - "unique_content_id": "3bb968a563522f059f423c3561dfed5d17a6d5c4d6bd3d1715133146dcc94142", - "aliases": [ - "CVE-2016-0800", - "VC-OPENSSL-20160301-CVE-2016-0800" + "unique_content_id": "ca04670f15a036f2d20611d996b2e03d", + "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address the vulnerability CVE-2015-0293. s2_srvr.c overwrite the wrong bytes in the master-key when applying Bleichenbacher protection for export cipher suites. This provides a Bleichenbacher oracle, and could potentially allow more efficient variants of the DROWN attack.", + "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8zf", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0r", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1m", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" + }, + { + "package": { + 
"name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2a", + "affected_version_range": "vers:openssl/1.0.2" + } ], - "summary": "A cross-protocol attack was discovered that could lead to decryption of TLS sessions by using a server supporting SSLv2 and EXPORT cipher suites as a Bleichenbacher RSA padding oracle. Note that traffic between clients and non-vulnerable servers can be decrypted provided another server supporting SSLv2 and EXPORT ciphers (even with a different protocol such as SMTP, IMAP or POP) shares the RSA keys of the non-vulnerable server. This vulnerability is known as DROWN (CVE-2016-0800). Recovering one session key requires the attacker to perform approximately 2^50 computation, as well as thousands of connections to the affected server. A more efficient variant of the DROWN attack exists against unpatched OpenSSL servers using versions that predate 1.0.2a, 1.0.1m, 1.0.0r and 0.9.8zf released on 19/Mar/2015 (see CVE-2016-0703 below). Users can avoid this issue by disabling the SSLv2 protocol in all their SSL/TLS servers, if they've not done so already. Disabling all SSLv2 ciphers is also sufficient, provided the patches for CVE-2015-3197 (fixed in OpenSSL 1.0.1r and 1.0.2f) have been deployed. Servers that have not disabled the SSLv2 protocol, and are not patched for CVE-2015-3197 are vulnerable to DROWN even if all SSLv2 ciphers are nominally disabled, because malicious clients can force the use of SSLv2 with EXPORT ciphers. OpenSSL 1.0.2g and 1.0.1s deploy the following mitigation against DROWN: SSLv2 is now by default disabled at build-time. Builds that are not configured with \"enable-ssl2\" will not support SSLv2. 
Even if \"enable-ssl2\" is used, users who want to negotiate SSLv2 via the version-flexible SSLv23_method() will need to explicitly call either of: SSL_CTX_clear_options(ctx, SSL_OP_NO_SSLv2); or SSL_clear_options(ssl, SSL_OP_NO_SSLv2); as appropriate. Even if either of those is used, or the application explicitly uses the version-specific SSLv2_method() or its client or server variants, SSLv2 ciphers vulnerable to exhaustive search key recovery have been removed. Specifically, the SSLv2 40-bit EXPORT ciphers, and SSLv2 56-bit DES are no longer available. In addition, weak ciphers in SSLv3 and up are now disabled in default builds of OpenSSL. Builds that are not configured with \"enable-weak-ssl-ciphers\" will not provide any \"EXPORT\" or \"LOW\" strength ciphers.", + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0704", + "severities": [], + "reference_id": "CVE-2016-0704", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20160301.txt", + "severities": [ + { + "value": "Moderate", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2016-03-01T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "dcfad5e453c456b47b7dcb85f3bbf948", + "summary": "In the BN_hex2bn function the number of hex digits is calculated using an int value |i|. Later |bn_expand| is called with a value of |i * 4|. For large values of |i| this can result in |bn_expand| not allocating any memory because |i * 4| is negative. This can leave the internal BIGNUM data field as NULL leading to a subsequent NULL ptr deref. For very large values of |i|, the calculation |i * 4| could be a positive value smaller than |i|. In this case memory is allocated to the internal BIGNUM data field, but it is insufficiently sized leading to heap corruption. A similar issue exists in BN_dec2bn. 
This could have security consequences if BN_hex2bn/BN_dec2bn is ever called by user applications with very large untrusted hex/dec data. This is anticipated to be a rare occurrence. All OpenSSL internal usage of these functions use data that is not expected to be untrusted, e.g. config file data or application command line arguments. If user developed applications generate config file data based on untrusted data then it is possible that this could also lead to security consequences. This is also anticipated to be rare.", "affected_packages": [ { "package": { @@ -6828,16 +6313,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0800", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0797", "severities": [], - "reference_id": "CVE-2016-0800", + "reference_id": "CVE-2016-0797", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160301.txt", "severities": [ { - "value": "High", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -6850,12 +6335,8 @@ "weaknesses": [] }, { - "unique_content_id": "3eadfec35b5b88ba68ecb0d97d2cba4203556ca8be6c28566ff28d045dbeeaba", - "aliases": [ - "CVE-2016-2105", - "VC-OPENSSL-20160503-CVE-2016-2105" - ], - "summary": "An overflow can occur in the EVP_EncodeUpdate() function which is used for Base64 encoding of binary data. If an attacker is able to supply very large amounts of input data then a length check can overflow resulting in a heap corruption. Internally to OpenSSL the EVP_EncodeUpdate() function is primarly used by the PEM_write_bio* family of functions. These are mainly used within the OpenSSL command line applications. These internal uses are not considered vulnerable because all calls are bounded with length checks so no overflow is possible. User applications that call these APIs directly with large amounts of untrusted data may be vulnerable. 
(Note: Initial analysis suggested that the PEM_write_bio* were vulnerable, and this is reflected in the patch commit message. This is no longer believed to be the case).", + "unique_content_id": "0d33c0311add27a6e1a49d7a3d965c38", + "summary": "When ASN.1 data is read from a BIO using functions such as d2i_CMS_bio() a short invalid encoding can casuse allocation of large amounts of memory potentially consuming excessive resources or exhausting memory. Any application parsing untrusted data through d2i BIO functions is affected. The memory based functions such as d2i_X509() are *not* affected. Since the memory based functions are used by the TLS library, TLS applications are not affected.", "affected_packages": [ { "package": { @@ -6884,9 +6365,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2105", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2109", "severities": [], - "reference_id": "CVE-2016-2105", + "reference_id": "CVE-2016-2109", "reference_type": "" }, { @@ -6906,11 +6387,7 @@ "weaknesses": [] }, { - "unique_content_id": "fb73586b842fb010ced45dc708d8346e3aded542fe78c11f03f83bf754997edd", - "aliases": [ - "CVE-2016-2106", - "VC-OPENSSL-20160503-CVE-2016-2106" - ], + "unique_content_id": "6646efbc2c3440a5aaedd5479df16fe0", "summary": "An overflow can occur in the EVP_EncryptUpdate() function. If an attacker is able to supply very large amounts of input data after a previous call to EVP_EncryptUpdate() with a partial block then a length check can overflow resulting in a heap corruption. Following an analysis of all OpenSSL internal usage of the EVP_EncryptUpdate() function all usage is one of two forms. The first form is where the EVP_EncryptUpdate() call is known to be the first called function after an EVP_EncryptInit(), and therefore that specific call must be safe. 
The second form is where the length passed to EVP_EncryptUpdate() can be seen from the code to be some small value and therefore there is no possibility of an overflow. Since all instances are one of these two forms, it is believed that there can be no overflows in internal code due to this problem. It should be noted that EVP_DecryptUpdate() can call EVP_EncryptUpdate() in certain code paths. Also EVP_CipherUpdate() is a synonym for EVP_EncryptUpdate(). All instances of these calls have also been analysed too and it is believed there are no instances in internal usage where an overflow could occur. This could still represent a security issue for end user code that calls this function directly.", "affected_packages": [ { @@ -6962,12 +6439,8 @@ "weaknesses": [] }, { - "unique_content_id": "45c33cd5992b2f757ade809ec1b55e35aed7fa0d57bb8b46c8f7ab46d4cf5d81", - "aliases": [ - "CVE-2016-2107", - "VC-OPENSSL-20160503-CVE-2016-2107" - ], - "summary": "A MITM attacker can use a padding oracle attack to decrypt traffic when the connection uses an AES CBC cipher and the server support AES-NI. This issue was introduced as part of the fix for Lucky 13 padding attack (CVE-2013-0169). The padding check was rewritten to be in constant time by making sure that always the same bytes are read and compared against either the MAC or padding bytes. But it no longer checked that there was enough data to have both the MAC and padding bytes.", + "unique_content_id": "80621d002083a0f1c1d9267b2575c2af", + "summary": "An overflow can occur in the EVP_EncodeUpdate() function which is used for Base64 encoding of binary data. If an attacker is able to supply very large amounts of input data then a length check can overflow resulting in a heap corruption. Internally to OpenSSL the EVP_EncodeUpdate() function is primarly used by the PEM_write_bio* family of functions. These are mainly used within the OpenSSL command line applications. 
These internal uses are not considered vulnerable because all calls are bounded with length checks so no overflow is possible. User applications that call these APIs directly with large amounts of untrusted data may be vulnerable. (Note: Initial analysis suggested that the PEM_write_bio* were vulnerable, and this is reflected in the patch commit message. This is no longer believed to be the case).", "affected_packages": [ { "package": { @@ -6996,22 +6469,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2107", - "severities": [], - "reference_id": "CVE-2016-2107", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/68595c0c2886e7942a14f98c17a55a88afb6c292", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2105", "severities": [], - "reference_id": "", + "reference_id": "CVE-2016-2105", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160503.txt", "severities": [ { - "value": "High", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -7024,12 +6491,8 @@ "weaknesses": [] }, { - "unique_content_id": "2455be4d3319416de5807835a5e13cb7a40a862fcb21503efe33c9b0836132bc", - "aliases": [ - "CVE-2016-2108", - "VC-OPENSSL-20160503-CVE-2016-2108" - ], - "summary": "This issue affected versions of OpenSSL prior to April 2015. The bug causing the vulnerability was fixed on April 18th 2015, and released as part of the June 11th 2015 security releases. The security impact of the bug was not known at the time. In previous versions of OpenSSL, ASN.1 encoding the value zero represented as a negative integer can cause a buffer underflow with an out-of-bounds write in i2c_ASN1_INTEGER. The ASN.1 parser does not normally create \"negative zeroes\" when parsing ASN.1 input, and therefore, an attacker cannot trigger this bug. 
However, a second, independent bug revealed that the ASN.1 parser (specifically, d2i_ASN1_TYPE) can misinterpret a large universal tag as a negative zero value. Large universal tags are not present in any common ASN.1 structures (such as X509) but are accepted as part of ANY structures. Therefore, if an application deserializes untrusted ASN.1 structures containing an ANY field, and later reserializes them, an attacker may be able to trigger an out-of-bounds write. This has been shown to cause memory corruption that is potentially exploitable with some malloc implementations. Applications that parse and re-encode X509 certificates are known to be vulnerable. Applications that verify RSA signatures on X509 certificates may also be vulnerable; however, only certificates with valid signatures trigger ASN.1 re-encoding and hence the bug. Specifically, since OpenSSL's default TLS X509 chain verification code verifies the certificate chain from root to leaf, TLS handshakes could only be targeted with valid certificates issued by trusted Certification Authorities.", + "unique_content_id": "9448f7ccc33194fa36bbdb2f40e749b2", + "summary": "ASN1 Strings that are over 1024 bytes can cause an overread in applications using the X509_NAME_oneline() function on EBCDIC systems. 
This could result in arbitrary stack data being returned in the buffer.", "affected_packages": [ { "package": { @@ -7040,8 +6503,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1o", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n" + "fixed_version": "1.0.1t", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r|1.0.1s" }, { "package": { @@ -7052,22 +6515,22 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2c", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b" + "fixed_version": "1.0.2h", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2108", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2176", "severities": [], - "reference_id": "CVE-2016-2108", + "reference_id": "CVE-2016-2176", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160503.txt", "severities": [ { - "value": "High", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -7080,12 +6543,8 @@ "weaknesses": [] }, { - "unique_content_id": "86acb94d7c04bbcbd8c25c43ae292bd04c94a03e34fdc267053638c248e0b7f3", - "aliases": [ - "CVE-2016-2109", - "VC-OPENSSL-20160503-CVE-2016-2109" - ], - "summary": "When ASN.1 data is read from a BIO using functions such as d2i_CMS_bio() a short invalid encoding can casuse allocation of large amounts of memory potentially consuming excessive resources or exhausting memory. Any application parsing untrusted data through d2i BIO functions is affected. The memory based functions such as d2i_X509() are *not* affected. 
Since the memory based functions are used by the TLS library, TLS applications are not affected.", + "unique_content_id": "eaa2fce419eaf5b4ea668e9106c1fd43", + "summary": "A MITM attacker can use a padding oracle attack to decrypt traffic when the connection uses an AES CBC cipher and the server support AES-NI. This issue was introduced as part of the fix for Lucky 13 padding attack (CVE-2013-0169). The padding check was rewritten to be in constant time by making sure that always the same bytes are read and compared against either the MAC or padding bytes. But it no longer checked that there was enough data to have both the MAC and padding bytes.", "affected_packages": [ { "package": { @@ -7114,16 +6573,22 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2109", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2107", "severities": [], - "reference_id": "CVE-2016-2109", + "reference_id": "CVE-2016-2107", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/68595c0c2886e7942a14f98c17a55a88afb6c292", + "severities": [], + "reference_id": "", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160503.txt", "severities": [ { - "value": "Low", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -7136,12 +6601,8 @@ "weaknesses": [] }, { - "unique_content_id": "79d98ea5b970167fc32b2dd513af82c8f21fc88f5863ff80c40bf92a86567dc8", - "aliases": [ - "CVE-2016-2176", - "VC-OPENSSL-20160503-CVE-2016-2176" - ], - "summary": "ASN1 Strings that are over 1024 bytes can cause an overread in applications using the X509_NAME_oneline() function on EBCDIC systems. This could result in arbitrary stack data being returned in the buffer.", + "unique_content_id": "eadc3ef5343caffdb16fc7a845983d99", + "summary": "This issue affected versions of OpenSSL prior to April 2015. 
The bug causing the vulnerability was fixed on April 18th 2015, and released as part of the June 11th 2015 security releases. The security impact of the bug was not known at the time. In previous versions of OpenSSL, ASN.1 encoding the value zero represented as a negative integer can cause a buffer underflow with an out-of-bounds write in i2c_ASN1_INTEGER. The ASN.1 parser does not normally create \"negative zeroes\" when parsing ASN.1 input, and therefore, an attacker cannot trigger this bug. However, a second, independent bug revealed that the ASN.1 parser (specifically, d2i_ASN1_TYPE) can misinterpret a large universal tag as a negative zero value. Large universal tags are not present in any common ASN.1 structures (such as X509) but are accepted as part of ANY structures. Therefore, if an application deserializes untrusted ASN.1 structures containing an ANY field, and later reserializes them, an attacker may be able to trigger an out-of-bounds write. This has been shown to cause memory corruption that is potentially exploitable with some malloc implementations. Applications that parse and re-encode X509 certificates are known to be vulnerable. Applications that verify RSA signatures on X509 certificates may also be vulnerable; however, only certificates with valid signatures trigger ASN.1 re-encoding and hence the bug. 
Specifically, since OpenSSL's default TLS X509 chain verification code verifies the certificate chain from root to leaf, TLS handshakes could only be targeted with valid certificates issued by trusted Certification Authorities.", "affected_packages": [ { "package": { @@ -7152,8 +6613,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1t", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r|1.0.1s" + "fixed_version": "1.0.1o", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n" }, { "package": { @@ -7164,22 +6625,22 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2h", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g" + "fixed_version": "1.0.2c", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2176", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2108", "severities": [], - "reference_id": "CVE-2016-2176", + "reference_id": "CVE-2016-2108", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160503.txt", "severities": [ { - "value": "Low", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -7192,11 +6653,7 @@ "weaknesses": [] }, { - "unique_content_id": "7cf78e4965685dd994d47b5b4648c8671c19b75994e4b56ad143255738c4b716", - "aliases": [ - "CVE-2016-2177", - "VC-OPENSSL-20160601-CVE-2016-2177" - ], + "unique_content_id": "4c10365eacf49048d2ca1f3d490de4c2", "summary": "Avoid some undefined pointer arithmetic A common idiom in the codebase is to check limits in the following manner: \"p + len > limit\" Where \"p\" points to some malloc'd data of SIZE bytes and limit == p + SIZE \"len\" here could be from some externally supplied data (e.g. 
from a TLS message). The rules of C pointer arithmetic are such that \"p + len\" is only well defined where len <= SIZE. Therefore the above idiom is actually undefined behaviour. For example this could cause problems if some malloc implementation provides an address for \"p\" such that \"p + len\" actually overflows for values of len that are too big and therefore p + len < limit.", "affected_packages": [ { @@ -7248,11 +6705,7 @@ "weaknesses": [] }, { - "unique_content_id": "41d1b686cc25b51e538b1294c03f9bd49194604c0a5b1878a85ef935c82f0573", - "aliases": [ - "CVE-2016-2178", - "VC-OPENSSL-20160607-CVE-2016-2178" - ], + "unique_content_id": "69c98b0d04f2bf1a2d1f044b54108625", "summary": "Operations in the DSA signing algorithm should run in constant time in order to avoid side channel attacks. A flaw in the OpenSSL DSA implementation means that a non-constant time codepath is followed for certain operations. This has been demonstrated through a cache-timing attack to be sufficient for an attacker to recover the private DSA key.", "affected_packages": [ { @@ -7304,12 +6757,8 @@ "weaknesses": [] }, { - "unique_content_id": "24d135d43dac5961bd8e824a6be06bf737548a27b6908a9bdb06c4cf6be7da66", - "aliases": [ - "CVE-2016-2179", - "VC-OPENSSL-20160822-CVE-2016-2179" - ], - "summary": "In a DTLS connection where handshake messages are delivered out-of-order those messages that OpenSSL is not yet ready to process will be buffered for later use. Under certain circumstances, a flaw in the logic means that those messages do not get removed from the buffer even though the handshake has been completed. An attacker could force up to approx. 15 messages to remain in the buffer when they are no longer required. These messages will be cleared when the DTLS connection is closed. The default maximum size for a message is 100k. Therefore the attacker could force an additional 1500k to be consumed per connection. 
By opening many simulataneous connections an attacker could cause a DoS attack through memory exhaustion.", + "unique_content_id": "c3ef560f8d241b1b75cdef3199faa45c", + "summary": "The function TS_OBJ_print_bio() misuses OBJ_obj2txt(): the return value is the total length the OID text representation would use and not the amount of data written. This will result in OOB reads when large OIDs are presented.", "affected_packages": [ { "package": { @@ -7338,21 +6787,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2179", - "severities": [], - "reference_id": "CVE-2016-2179", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/00a4c1421407b6ac796688871b0a49a179c694d9", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/26f2c5774f117aea588e8f31fad38bcf14e83bec", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2180", "severities": [], - "reference_id": "", + "reference_id": "CVE-2016-2180", "reference_type": "" }, { @@ -7368,16 +6805,12 @@ "reference_type": "" } ], - "date_published": "2016-08-22T00:00:00+00:00", + "date_published": "2016-07-22T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "39ea5c947d194650d344e5adcd4353a31075fe76556175678092991fef56935a", - "aliases": [ - "CVE-2016-2180", - "VC-OPENSSL-20160722-CVE-2016-2180" - ], - "summary": "The function TS_OBJ_print_bio() misuses OBJ_obj2txt(): the return value is the total length the OID text representation would use and not the amount of data written. This will result in OOB reads when large OIDs are presented.", + "unique_content_id": "e29c5c80d781403086304ecb4fce7a59", + "summary": "The function BN_bn2dec() does not check the return value of BN_div_word(). This can cause an OOB write if an application uses this function with an overly large BIGNUM. 
This could be a problem if an overly large certificate or CRL is printed out from an untrusted source. TLS is not affected because record limits will reject an oversized certificate before it is parsed.", "affected_packages": [ { "package": { @@ -7406,9 +6839,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2180", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2182", "severities": [], - "reference_id": "CVE-2016-2180", + "reference_id": "CVE-2016-2182", "reference_type": "" }, { @@ -7424,15 +6857,11 @@ "reference_type": "" } ], - "date_published": "2016-07-22T00:00:00+00:00", + "date_published": "2016-08-16T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "2dcc23a57bd50008fd1ff1dd5ab1e15ac70d58a3b621d70b039cee4339be5439", - "aliases": [ - "CVE-2016-2181", - "VC-OPENSSL-20160819-CVE-2016-2181" - ], + "unique_content_id": "bad085048774b51abab2b4e37c3868a0", "summary": "A flaw in the DTLS replay attack protection mechanism means that records that arrive for future epochs update the replay protection \"window\" before the MAC for the record has been validated. This could be exploited by an attacker by sending a record for the next epoch (which does not have to decrypt or have a valid MAC), with a very large sequence number. This means that all subsequent legitimate packets are dropped causing a denial of service for a specific DTLS connection.", "affected_packages": [ { @@ -7496,12 +6925,8 @@ "weaknesses": [] }, { - "unique_content_id": "fcd1d51451689926072528e12c9206f1c13c61ed97e42c132b0667ea48870171", - "aliases": [ - "CVE-2016-2182", - "VC-OPENSSL-20160816-CVE-2016-2182" - ], - "summary": "The function BN_bn2dec() does not check the return value of BN_div_word(). This can cause an OOB write if an application uses this function with an overly large BIGNUM. This could be a problem if an overly large certificate or CRL is printed out from an untrusted source. 
TLS is not affected because record limits will reject an oversized certificate before it is parsed.", + "unique_content_id": "c541cb508cce45e8ffa33b03c44a7706", + "summary": "In a DTLS connection where handshake messages are delivered out-of-order those messages that OpenSSL is not yet ready to process will be buffered for later use. Under certain circumstances, a flaw in the logic means that those messages do not get removed from the buffer even though the handshake has been completed. An attacker could force up to approx. 15 messages to remain in the buffer when they are no longer required. These messages will be cleared when the DTLS connection is closed. The default maximum size for a message is 100k. Therefore the attacker could force an additional 1500k to be consumed per connection. By opening many simulataneous connections an attacker could cause a DoS attack through memory exhaustion.", "affected_packages": [ { "package": { @@ -7530,57 +6955,25 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2182", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2179", "severities": [], - "reference_id": "CVE-2016-2182", + "reference_id": "CVE-2016-2179", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20160922.txt", - "severities": [ - { - "value": "Low", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2016-08-16T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "d87e634ab174d154043776ba4b3c6659d5f37175726b216710c42ec5144d3d95", - "aliases": [ - "CVE-2016-2183", - "VC-OPENSSL-20160824-CVE-2016-2183" - ], - "summary": "Because DES (and triple-DES) has only a 64-bit block size, birthday attacks are a real concern. For example, with the ability to run Javascript in a browser, it is possible to send enough traffic to cause a collision, and then use that information to recover something like a session Cookie. 
Triple-DES, which shows up as \u201cDES-CBC3\u201d in an OpenSSL cipher string, is still used on the Web, and major browsers are not yet willing to completely disable it. If you run a server, you should disable triple-DES. This is generally a configuration issue. If you run an old server that doesn\u2019t support any better ciphers than DES or RC4, you should upgrade. For 1.0.2 and 1.0.1, we removed the triple-DES ciphers from the \u201cHIGH\u201d keyword and put them into \u201cMEDIUM.\u201d Note that we did not remove them from the \u201cDEFAULT\u201d keyword. For the 1.1.0 release, we treat triple-DES just like we are treating RC4. It is not compiled by default; you have to use \u201cenable-weak-ssl-ciphers\u201d as a config option. Even when those ciphers are compiled, triple-DES is only in the \u201cMEDIUM\u201d keyword. In addition we also removed it from the \u201cDEFAULT\u201d keyword.", - "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.2i", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h" - } - ], - "references": [ + "url": "https://github.com/openssl/openssl/commit/00a4c1421407b6ac796688871b0a49a179c694d9", + "severities": [], + "reference_id": "", + "reference_type": "" + }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2183", + "url": "https://github.com/openssl/openssl/commit/26f2c5774f117aea588e8f31fad38bcf14e83bec", "severities": [], - "reference_id": "CVE-2016-2183", + "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/blog/blog/2016/08/24/sweet32/", + "url": "https://www.openssl.org/news/secadv/20160922.txt", "severities": [ { "value": "Low", @@ -7592,15 +6985,11 @@ "reference_type": "" } ], - "date_published": "2016-08-24T00:00:00+00:00", + "date_published": "2016-08-22T00:00:00+00:00", 
"weaknesses": [] }, { - "unique_content_id": "e43493ec8a73bb371bf163314718c77edbe7d72190cd2e88e09e3a65d4500cdb", - "aliases": [ - "CVE-2016-6302", - "VC-OPENSSL-20160823-CVE-2016-6302" - ], + "unique_content_id": "cfaace2e186847527636a2195766fc52", "summary": "If a server uses SHA512 for TLS session ticket HMAC it is vulnerable to a DoS attack where a malformed ticket will result in an OOB read which will ultimately crash. The use of SHA512 in TLS session tickets is comparatively rare as it requires a custom server callback and ticket lookup mechanism.", "affected_packages": [ { @@ -7664,11 +7053,7 @@ "weaknesses": [] }, { - "unique_content_id": "c6b031581915c5cc5b42df4000da62b01be62afbba15c264e9c189aac336f855", - "aliases": [ - "CVE-2016-6303", - "VC-OPENSSL-20160824-CVE-2016-6303" - ], + "unique_content_id": "2af63a761bf4ddbbaeb92afa382151cf", "summary": "An overflow can occur in MDC2_Update() either if called directly or through the EVP_DigestUpdate() function using MDC2. If an attacker is able to supply very large amounts of input data after a previous call to EVP_EncryptUpdate() with a partial block then a length check can overflow resulting in a heap corruption. The amount of data needed is comparable to SIZE_MAX which is impractical on most platforms.", "affected_packages": [ { @@ -7732,12 +7117,8 @@ "weaknesses": [] }, { - "unique_content_id": "aef1aa2ae8685c93c4869930f90ef8cd3bcc3fbadf949e4238d182e8fd2684a0", - "aliases": [ - "CVE-2016-6304", - "VC-OPENSSL-20160922-CVE-2016-6304" - ], - "summary": "A malicious client can send an excessively large OCSP Status Request extension. If that client continually requests renegotiation, sending a large OCSP Status Request extension each time, then there will be unbounded memory growth on the server. This will eventually lead to a Denial Of Service attack through memory exhaustion. Servers with a default configuration are vulnerable even if they do not support OCSP. 
Builds using the \"no-ocsp\" build time option are not affected. Servers using OpenSSL versions prior to 1.0.1g are not vulnerable in a default configuration, instead only if an application explicitly enables OCSP stapling support.", + "unique_content_id": "659c848c83841e30d1052e8d49e18051", + "summary": "Because DES (and triple-DES) has only a 64-bit block size, birthday attacks are a real concern. For example, with the ability to run Javascript in a browser, it is possible to send enough traffic to cause a collision, and then use that information to recover something like a session Cookie. Triple-DES, which shows up as \u201cDES-CBC3\u201d in an OpenSSL cipher string, is still used on the Web, and major browsers are not yet willing to completely disable it. If you run a server, you should disable triple-DES. This is generally a configuration issue. If you run an old server that doesn\u2019t support any better ciphers than DES or RC4, you should upgrade. For 1.0.2 and 1.0.1, we removed the triple-DES ciphers from the \u201cHIGH\u201d keyword and put them into \u201cMEDIUM.\u201d Note that we did not remove them from the \u201cDEFAULT\u201d keyword. For the 1.1.0 release, we treat triple-DES just like we are treating RC4. It is not compiled by default; you have to use \u201cenable-weak-ssl-ciphers\u201d as a config option. Even when those ciphers are compiled, triple-DES is only in the \u201cMEDIUM\u201d keyword. 
In addition we also removed it from the \u201cDEFAULT\u201d keyword.", "affected_packages": [ { "package": { @@ -7748,9 +7129,37 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1u", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r|1.0.1s|1.0.1t" + "fixed_version": "1.0.2i", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2183", + "severities": [], + "reference_id": "CVE-2016-2183", + "reference_type": "" }, + { + "url": "https://www.openssl.org/blog/blog/2016/08/24/sweet32/", + "severities": [ + { + "value": "Low", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2016-08-24T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "17585a9b090ed55460ac0cad6c3b5f6e", + "summary": "In OpenSSL 1.0.2 and earlier some missing message length checks can result in OOB reads of up to 2 bytes beyond an allocated buffer. There is a theoretical DoS risk but this has not been observed in practice on common platforms. The messages affected are client certificate, client certificate request and server certificate. 
As a result the attack can only be performed against a client or a server which enables client authentication.", + "affected_packages": [ { "package": { "name": "openssl", @@ -7760,8 +7169,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2i", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h" + "fixed_version": "1.0.1u", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r|1.0.1s|1.0.1t" }, { "package": { @@ -7772,31 +7181,25 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.0a", - "affected_version_range": "vers:openssl/1.1.0" + "fixed_version": "1.0.2i", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6304", - "severities": [], - "reference_id": "CVE-2016-6304", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/2c0d295e26306e15a92eb23a84a1802005c1c137", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6306", "severities": [], - "reference_id": "", + "reference_id": "CVE-2016-6306", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/ea39b16b71e4e72a228a4535bd6d6a02c5edbc1f", + "url": "https://github.com/openssl/openssl/commit/bb1a4866034255749ac578adb06a76335fc117b1", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/a59ab1c4dd27a4c7c6e88f3c33747532fd144412", + "url": "https://github.com/openssl/openssl/commit/006a788c84e541c8920dd2ad85fb62b52185c519", "severities": [], "reference_id": "", "reference_type": "" @@ -7805,7 +7208,7 @@ "url": "https://www.openssl.org/news/secadv/20160922.txt", "severities": [ { - "value": "High", + "value": 
"Low", "system": "generic_textual", "scoring_elements": "" } @@ -7814,16 +7217,12 @@ "reference_type": "" } ], - "date_published": "2016-09-22T00:00:00+00:00", + "date_published": "2016-09-21T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "76c39ca965fe72efb7732d5f6a6388c6f20696a7d03dcbe47c0c548a60e41ca8", - "aliases": [ - "CVE-2016-6305", - "VC-OPENSSL-20160922-CVE-2016-6305" - ], - "summary": "OpenSSL 1.1.0 SSL/TLS will hang during a call to SSL_peek() if the peer sends an empty record. This could be exploited by a malicious peer in a Denial Of Service attack.", + "unique_content_id": "8ee9b8d2efa51108b44de0e5f0671902", + "summary": "A DTLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. However, the excessive message length check still takes place, and this would cause the connection to immediately fail. Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. 
This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service.", "affected_packages": [ { "package": { @@ -7840,13 +7239,13 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6305", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6308", "severities": [], - "reference_id": "CVE-2016-6305", + "reference_id": "CVE-2016-6308", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/63658103d4441924f8dbfc517b99bb54758a98b9", + "url": "https://github.com/openssl/openssl/commit/df6b5e29ffea2d5a3e08de92fb765fdb21c7a21e", "severities": [], "reference_id": "", "reference_type": "" @@ -7855,7 +7254,7 @@ "url": "https://www.openssl.org/news/secadv/20160922.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -7864,16 +7263,12 @@ "reference_type": "" } ], - "date_published": "2016-09-22T00:00:00+00:00", + "date_published": "2016-09-21T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "335b68ab52a7311993d1ba47eab3fb676dfaee4b10a497bd992f7cbcb13edd67", - "aliases": [ - "CVE-2016-6306", - "VC-OPENSSL-20160921-CVE-2016-6306" - ], 
- "summary": "In OpenSSL 1.0.2 and earlier some missing message length checks can result in OOB reads of up to 2 bytes beyond an allocated buffer. There is a theoretical DoS risk but this has not been observed in practice on common platforms. The messages affected are client certificate, client certificate request and server certificate. As a result the attack can only be performed against a client or a server which enables client authentication.", + "unique_content_id": "f598dbb4cacf63ed93e588c1db8ff5b8", + "summary": "A TLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. However, the excessive message length check still takes place, and this would cause the connection to immediately fail. Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. 
This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service.", "affected_packages": [ { "package": { @@ -7884,37 +7279,19 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1u", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r|1.0.1s|1.0.1t" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.2i", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h" + "fixed_version": "1.1.0a", + "affected_version_range": "vers:openssl/1.1.0" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6306", - "severities": [], - "reference_id": "CVE-2016-6306", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/bb1a4866034255749ac578adb06a76335fc117b1", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6307", "severities": [], - "reference_id": "", + "reference_id": "CVE-2016-6307", "reference_type": "" }, { - "url": 
"https://github.com/openssl/openssl/commit/006a788c84e541c8920dd2ad85fb62b52185c519", + "url": "https://github.com/openssl/openssl/commit/4b390b6c3f8df925dc92a3dd6b022baa9a2f4650", "severities": [], "reference_id": "", "reference_type": "" @@ -7936,13 +7313,33 @@ "weaknesses": [] }, { - "unique_content_id": "4eaf09765e65f727f0d87c8ee1340fb5157d0195ebc4d87f7d243b7de0540731", - "aliases": [ - "CVE-2016-6307", - "VC-OPENSSL-20160921-CVE-2016-6307" - ], - "summary": "A TLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. However, the excessive message length check still takes place, and this would cause the connection to immediately fail. Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. 
This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service.", + "unique_content_id": "3b3ff4143b6859104d216a310d58db58", + "summary": "A malicious client can send an excessively large OCSP Status Request extension. If that client continually requests renegotiation, sending a large OCSP Status Request extension each time, then there will be unbounded memory growth on the server. This will eventually lead to a Denial Of Service attack through memory exhaustion. Servers with a default configuration are vulnerable even if they do not support OCSP. Builds using the \"no-ocsp\" build time option are not affected. 
Servers using OpenSSL versions prior to 1.0.1g are not vulnerable in a default configuration, instead only if an application explicitly enables OCSP stapling support.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1u", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r|1.0.1s|1.0.1t" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2i", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h" + }, { "package": { "name": "openssl", @@ -7958,13 +7355,25 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6307", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6304", "severities": [], - "reference_id": "CVE-2016-6307", + "reference_id": "CVE-2016-6304", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/4b390b6c3f8df925dc92a3dd6b022baa9a2f4650", + "url": "https://github.com/openssl/openssl/commit/2c0d295e26306e15a92eb23a84a1802005c1c137", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/ea39b16b71e4e72a228a4535bd6d6a02c5edbc1f", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/a59ab1c4dd27a4c7c6e88f3c33747532fd144412", "severities": [], "reference_id": "", "reference_type": "" @@ -7973,7 +7382,7 @@ "url": "https://www.openssl.org/news/secadv/20160922.txt", "severities": [ { - "value": "Low", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -7982,16 +7391,12 @@ 
"reference_type": "" } ], - "date_published": "2016-09-21T00:00:00+00:00", + "date_published": "2016-09-22T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "afa659ff3079acf90f6cbb1cc37cf907f479517ed3f41f22ff048c397dccc1e5", - "aliases": [ - "CVE-2016-6308", - "VC-OPENSSL-20160921-CVE-2016-6308" - ], - "summary": "A DTLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. However, the excessive message length check still takes place, and this would cause the connection to immediately fail. Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. 
Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service.", + "unique_content_id": "ec3000e978936c5dc59eeb71d14f61d0", + "summary": "OpenSSL 1.1.0 SSL/TLS will hang during a call to SSL_peek() if the peer sends an empty record. This could be exploited by a malicious peer in a Denial Of Service attack.", "affected_packages": [ { "package": { @@ -8008,13 +7413,13 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6308", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6305", "severities": [], - "reference_id": "CVE-2016-6308", + "reference_id": "CVE-2016-6305", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/df6b5e29ffea2d5a3e08de92fb765fdb21c7a21e", + "url": "https://github.com/openssl/openssl/commit/63658103d4441924f8dbfc517b99bb54758a98b9", "severities": [], "reference_id": "", "reference_type": "" @@ -8023,7 +7428,7 @@ "url": "https://www.openssl.org/news/secadv/20160922.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -8032,16 +7437,12 @@ "reference_type": "" } ], - "date_published": "2016-09-21T00:00:00+00:00", + "date_published": "2016-09-22T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "2c4faeb5ab598f4bcd7363261f979466b7ea8c02e2bfa6f68c53ef466d115f77", - "aliases": [ - "CVE-2016-6309", - "VC-OPENSSL-20160926-CVE-2016-6309" - ], - "summary": "This issue only affects OpenSSL 1.1.0a, released on 22nd September 2016. 
The patch applied to address CVE-2016-6307 resulted in an issue where if a message larger than approx 16k is received then the underlying buffer to store the incoming message is reallocated and moved. Unfortunately a dangling pointer to the old location is left which results in an attempt to write to the previously freed location. This is likely to result in a crash, however it could potentially lead to execution of arbitrary code.", + "unique_content_id": "76efc0216d0391eac89b5097852a6f7e", + "summary": "This issue only affects OpenSSL 1.0.2i, released on 22nd September 2016. A bug fix which included a CRL sanity check was added to OpenSSL 1.1.0 but was omitted from OpenSSL 1.0.2i. As a result any attempt to use CRLs in OpenSSL 1.0.2i will crash with a null pointer exception.", "affected_packages": [ { "package": { @@ -8052,19 +7453,19 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.0b", - "affected_version_range": "vers:openssl/1.1.0a" + "fixed_version": "1.0.2j", + "affected_version_range": "vers:openssl/1.0.2i" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6309", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-7052", "severities": [], - "reference_id": "CVE-2016-6309", + "reference_id": "CVE-2016-7052", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/acacbfa7565c78d2273c0b2a2e5e803f44afefeb", + "url": "https://github.com/openssl/openssl/commit/6e629b5be45face20b4ca71c4fcbfed78b864a2e", "severities": [], "reference_id": "", "reference_type": "" @@ -8073,7 +7474,7 @@ "url": "https://www.openssl.org/news/secadv/20160926.txt", "severities": [ { - "value": "Critical", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -8086,12 +7487,8 @@ "weaknesses": [] }, { - "unique_content_id": "3dea46bb518540bd2524894a1c99e33282ca9610f926cb1a2a6ab62fee7e9a8a", - "aliases": [ - "CVE-2016-7052", - "VC-OPENSSL-20160926-CVE-2016-7052" - ], - 
"summary": "This issue only affects OpenSSL 1.0.2i, released on 22nd September 2016. A bug fix which included a CRL sanity check was added to OpenSSL 1.1.0 but was omitted from OpenSSL 1.0.2i. As a result any attempt to use CRLs in OpenSSL 1.0.2i will crash with a null pointer exception.", + "unique_content_id": "ec731ec05e8399f02edc888b078cfcf1", + "summary": "This issue only affects OpenSSL 1.1.0a, released on 22nd September 2016. The patch applied to address CVE-2016-6307 resulted in an issue where if a message larger than approx 16k is received then the underlying buffer to store the incoming message is reallocated and moved. Unfortunately a dangling pointer to the old location is left which results in an attempt to write to the previously freed location. This is likely to result in a crash, however it could potentially lead to execution of arbitrary code.", "affected_packages": [ { "package": { @@ -8102,19 +7499,19 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2j", - "affected_version_range": "vers:openssl/1.0.2i" + "fixed_version": "1.1.0b", + "affected_version_range": "vers:openssl/1.1.0a" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-7052", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6309", "severities": [], - "reference_id": "CVE-2016-7052", + "reference_id": "CVE-2016-6309", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/6e629b5be45face20b4ca71c4fcbfed78b864a2e", + "url": "https://github.com/openssl/openssl/commit/acacbfa7565c78d2273c0b2a2e5e803f44afefeb", "severities": [], "reference_id": "", "reference_type": "" @@ -8123,7 +7520,7 @@ "url": "https://www.openssl.org/news/secadv/20160926.txt", "severities": [ { - "value": "Moderate", + "value": "Critical", "system": "generic_textual", "scoring_elements": "" } @@ -8136,12 +7533,8 @@ "weaknesses": [] }, { - "unique_content_id": "ced21cf334c43c1968b1e630d0e5e466fc113b5ec477a716a9d2328d84a41e08", 
- "aliases": [ - "CVE-2016-7053", - "VC-OPENSSL-20161110-CVE-2016-7053" - ], - "summary": "Applications parsing invalid CMS structures can crash with a NULL pointer dereference. This is caused by a bug in the handling of the ASN.1 CHOICE type in OpenSSL 1.1.0 which can result in a NULL value being passed to the structure callback if an attempt is made to free certain invalid encodings. Only CHOICE structures using a callback which do not handle NULL value are affected.", + "unique_content_id": "2260cd2fea019c35edd74053d43afbfa", + "summary": "TLS connections using *-CHACHA20-POLY1305 ciphersuites are susceptible to a DoS attack by corrupting larger payloads. This can result in an OpenSSL crash. This issue is not considered to be exploitable beyond a DoS.", "affected_packages": [ { "package": { @@ -8158,13 +7551,13 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-7053", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-7054", "severities": [], - "reference_id": "CVE-2016-7053", + "reference_id": "CVE-2016-7054", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/610b66267e41a32805ab54cbc580c5a6d5826cb4", + "url": "https://github.com/openssl/openssl/commit/99d97842ddb5fbbbfb5e9820a64ebd19afe569f6", "severities": [], "reference_id": "", "reference_type": "" @@ -8173,7 +7566,7 @@ "url": "https://www.openssl.org/news/secadv/20161110.txt", "severities": [ { - "value": "Moderate", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -8186,12 +7579,8 @@ "weaknesses": [] }, { - "unique_content_id": "de494f6e53a555a8a467bd0841b9b26accb6e568bdb3f941b8100b02f3325224", - "aliases": [ - "CVE-2016-7054", - "VC-OPENSSL-20161110-CVE-2016-7054" - ], - "summary": "TLS connections using *-CHACHA20-POLY1305 ciphersuites are susceptible to a DoS attack by corrupting larger payloads. This can result in an OpenSSL crash. 
This issue is not considered to be exploitable beyond a DoS.", + "unique_content_id": "ad064a076d4f4136c4ff5cc9a1c32cb4", + "summary": "Applications parsing invalid CMS structures can crash with a NULL pointer dereference. This is caused by a bug in the handling of the ASN.1 CHOICE type in OpenSSL 1.1.0 which can result in a NULL value being passed to the structure callback if an attempt is made to free certain invalid encodings. Only CHOICE structures using a callback which do not handle NULL value are affected.", "affected_packages": [ { "package": { @@ -8208,13 +7597,13 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-7054", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-7053", "severities": [], - "reference_id": "CVE-2016-7054", + "reference_id": "CVE-2016-7053", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/99d97842ddb5fbbbfb5e9820a64ebd19afe569f6", + "url": "https://github.com/openssl/openssl/commit/610b66267e41a32805ab54cbc580c5a6d5826cb4", "severities": [], "reference_id": "", "reference_type": "" @@ -8223,7 +7612,7 @@ "url": "https://www.openssl.org/news/secadv/20161110.txt", "severities": [ { - "value": "High", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -8236,11 +7625,7 @@ "weaknesses": [] }, { - "unique_content_id": "f65656f9a1a4be03cdd849a3aa82992f4af18eae8d67af063c16232e3f59f754", - "aliases": [ - "CVE-2016-7055", - "VC-OPENSSL-20161110-CVE-2016-7055" - ], + "unique_content_id": "c46f2f9d6517a007f907f8a2e4c84820", "summary": "There is a carry propagating bug in the Broadwell-specific Montgomery multiplication procedure that handles input lengths divisible by, but longer than 256 bits. Analysis suggests that attacks against RSA, DSA and DH private keys are impossible. This is because the subroutine in question is not used in operations with the private key itself and an input of the attacker's direct choice. 
Otherwise the bug can manifest itself as transient authentication and key negotiation failures or reproducible erroneous outcome of public-key operations with specially crafted input. Among EC algorithms only Brainpool P-512 curves are affected and one presumably can attack ECDH key negotiation. Impact was not analyzed in detail, because pre-requisites for attack are considered unlikely. Namely multiple clients have to choose the curve in question and the server has to share the private key among them, neither of which is default behaviour. Even then only clients that chose the curve will be affected.", "affected_packages": [ { @@ -8304,11 +7689,7 @@ "weaknesses": [] }, { - "unique_content_id": "a98bf1ba98e36233e2f7857bfdf284a1cedd8cfe0d07d9f913d8b075bee096f1", - "aliases": [ - "CVE-2017-3730", - "VC-OPENSSL-20170126-CVE-2017-3730" - ], + "unique_content_id": "6f703a0f132094abbd39fd883ed6e241", "summary": "If a malicious server supplies bad parameters for a DHE or ECDHE key exchange then this can result in the client attempting to dereference a NULL pointer leading to a client crash. This could be exploited in a Denial of Service attack.", "affected_packages": [ { @@ -8354,12 +7735,8 @@ "weaknesses": [] }, { - "unique_content_id": "16f408917ccdd649067c701789a6f062b284973f8da7ae2ce42116010005ffc2", - "aliases": [ - "CVE-2017-3731", - "VC-OPENSSL-20170126-CVE-2017-3731" - ], - "summary": "If an SSL/TLS server or client is running on a 32-bit host, and a specific cipher is being used, then a truncated packet can cause that server or client to perform an out-of-bounds read, usually resulting in a crash. For OpenSSL 1.1.0, the crash can be triggered when using CHACHA20/POLY1305; users should upgrade to 1.1.0d. 
For Openssl 1.0.2, the crash can be triggered when using RC4-MD5; users who have not disabled that algorithm should update to 1.0.2k", + "unique_content_id": "b14fc26f1382b65b58128617820053c3", + "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites. 
Note: This issue is very similar to CVE-2015-3193 but must be treated as a separate problem.", "affected_packages": [ { "package": { @@ -8388,19 +7765,19 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3731", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3732", "severities": [], - "reference_id": "CVE-2017-3731", + "reference_id": "CVE-2017-3732", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/00d965474b22b54e4275232bc71ee0c699c5cd21", + "url": "https://github.com/openssl/openssl/commit/a59b90bf491410f1f2bc4540cc21f1980fd14c5b", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/51d009043670a627d6abe66894126851cf3690e9", + "url": "https://github.com/openssl/openssl/commit/760d04342a495ee86bf5adc71a91d126af64397f", "severities": [], "reference_id": "", "reference_type": "" @@ -8422,12 +7799,8 @@ "weaknesses": [] }, { - "unique_content_id": "d525e54aa33322501d8c100f7be2df5900113d09c8409a50ce37b77478001f13", - "aliases": [ - "CVE-2017-3732", - "VC-OPENSSL-20170126-CVE-2017-3732" - ], - "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. 
For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites. Note: This issue is very similar to CVE-2015-3193 but must be treated as a separate problem.", + "unique_content_id": "c271a33647e7cdefdce8ed38c15e1bb7", + "summary": "If an SSL/TLS server or client is running on a 32-bit host, and a specific cipher is being used, then a truncated packet can cause that server or client to perform an out-of-bounds read, usually resulting in a crash. For OpenSSL 1.1.0, the crash can be triggered when using CHACHA20/POLY1305; users should upgrade to 1.1.0d. For Openssl 1.0.2, the crash can be triggered when using RC4-MD5; users who have not disabled that algorithm should update to 1.0.2k", "affected_packages": [ { "package": { @@ -8456,19 +7829,19 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3732", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3731", "severities": [], - "reference_id": "CVE-2017-3732", + "reference_id": "CVE-2017-3731", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/a59b90bf491410f1f2bc4540cc21f1980fd14c5b", + "url": "https://github.com/openssl/openssl/commit/00d965474b22b54e4275232bc71ee0c699c5cd21", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/760d04342a495ee86bf5adc71a91d126af64397f", + "url": "https://github.com/openssl/openssl/commit/51d009043670a627d6abe66894126851cf3690e9", "severities": [], "reference_id": "", "reference_type": "" @@ -8490,11 +7863,7 @@ "weaknesses": [] }, { - "unique_content_id": "70561a8c52747e3882749934d40dc3c52e1a6fccd239925f1ff317779b30257e", - "aliases": [ - "CVE-2017-3733", - "VC-OPENSSL-20170216-CVE-2017-3733" - ], + "unique_content_id": "e5c015c5ea09f74ca8830fc675109209", "summary": "During a renegotiation handshake if the Encrypt-Then-Mac extension is negotiated where it was not in the original handshake (or 
vice-versa) then this can cause OpenSSL to crash (dependent on ciphersuite). Both clients and servers are affected.", "affected_packages": [ { @@ -8540,11 +7909,7 @@ "weaknesses": [] }, { - "unique_content_id": "6e51a8310007cae6d2dd2da43402f0ce33e9bc503675618ae3ed5e22435384c0", - "aliases": [ - "CVE-2017-3735", - "VC-OPENSSL-20170828-CVE-2017-3735" - ], + "unique_content_id": "3e3d332a535202d4a355d9c6f46f8511", "summary": "While parsing an IPAdressFamily extension in an X.509 certificate, it is possible to do a one-byte overread. This would result in an incorrect text display of the certificate.", "affected_packages": [ { @@ -8608,11 +7973,7 @@ "weaknesses": [] }, { - "unique_content_id": "723a84486e608c93ef84d012c9a3bdbec50fc03f94b6af7f2e3c6db35c4870db", - "aliases": [ - "CVE-2017-3736", - "VC-OPENSSL-20171102-CVE-2017-3736" - ], + "unique_content_id": "135805c0fbb3f388567abe5a782e3678", "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. 
This only affects processors that support the BMI1, BMI2 and ADX extensions like Intel Broadwell (5th generation) and later or AMD Ryzen.", "affected_packages": [ { @@ -8676,11 +8037,7 @@ "weaknesses": [] }, { - "unique_content_id": "99b0a08fcb1d6012836e07da86ee39aec6568240922f58640fcb7c9b8f561492", - "aliases": [ - "CVE-2017-3737", - "VC-OPENSSL-20171207-CVE-2017-3737" - ], + "unique_content_id": "37c832cd6a7a445e21de6bcaae2e6aad", "summary": "OpenSSL 1.0.2 (starting from version 1.0.2b) introduced an \"error state\" mechanism. The intent was that if a fatal error occurred during a handshake then OpenSSL would move into the error state and would immediately fail if you attempted to continue the handshake. This works as designed for the explicit handshake functions (SSL_do_handshake(), SSL_accept() and SSL_connect()), however due to a bug it does not work correctly if SSL_read() or SSL_write() is called directly. In that scenario, if the handshake fails then a fatal error will be returned in the initial function call. If SSL_read()/SSL_write() is subsequently called by the application for the same SSL object then it will succeed and the data is passed without being decrypted/encrypted directly from the SSL/TLS record layer. In order to exploit this issue an application bug would have to be present that resulted in a call to SSL_read()/SSL_write() being issued after having already received a fatal error.", "affected_packages": [ { @@ -8726,11 +8083,7 @@ "weaknesses": [] }, { - "unique_content_id": "9855d6d4847a8dac0b2ec4a4a8556a921f9a32c035e43bb98f4201ab12df0d4c", - "aliases": [ - "CVE-2017-3738", - "VC-OPENSSL-20171207-CVE-2017-3738" - ], + "unique_content_id": "fe526b02e32f024f79ab16ad59c5cd59", "summary": "There is an overflow bug in the AVX2 Montgomery multiplication procedure used in exponentiation with 1024-bit moduli. No EC algorithms are affected. 
Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH1024 are considered just feasible, because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be significant. However, for an attack on TLS to be meaningful, the server would have to share the DH1024 private key among multiple clients, which is no longer an option since CVE-2016-0701. This only affects processors that support the AVX2 but not ADX extensions like Intel Haswell (4th generation). Note: The impact from this issue is similar to CVE-2017-3736, CVE-2017-3732 and CVE-2015-3193. Due to the low severity of this issue we are not issuing a new release of OpenSSL 1.1.0 at this time. The fix will be included in OpenSSL 1.1.0h when it becomes available. The fix is also available in commit e502cc86d in the OpenSSL git repository.", "affected_packages": [ { @@ -8794,12 +8147,8 @@ "weaknesses": [] }, { - "unique_content_id": "52b60416f56fbd4cf154ad29a878e1a745b607dcee1653acb5985fa68607508b", - "aliases": [ - "CVE-2018-0732", - "VC-OPENSSL-20180612-CVE-2018-0732" - ], - "summary": "During key agreement in a TLS handshake using a DH(E) based ciphersuite a malicious server can send a very large prime value to the client. This will cause the client to spend an unreasonably long period of time generating a key for this prime resulting in a hang until the client has finished. This could be exploited in a Denial Of Service attack.", + "unique_content_id": "0add28e4bf2017a49afa086624548363", + "summary": "Because of an implementation bug the PA-RISC CRYPTO_memcmp function is effectively reduced to only comparing the least significant bit of each byte. 
This allows an attacker to forge messages that would be considered as authenticated in an amount of tries lower than that guaranteed by the security claims of the scheme. The module can only be compiled by the HP-UX assembler, so that only HP-UX PA-RISC targets are affected.", "affected_packages": [ { "package": { @@ -8810,46 +8159,28 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.0i", - "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g|1.1.0h" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.2p", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o" + "fixed_version": "1.1.0h", + "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0732", - "severities": [], - "reference_id": "CVE-2018-0732", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/ea7abeeabf92b7aca160bdd0208636d4da69f4f4", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0733", "severities": [], - "reference_id": "", + "reference_id": "CVE-2018-0733", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/3984ef0b72831da8b3ece4745cac4f8575b19098", + "url": "https://github.com/openssl/openssl/commit/56d5a4bfcaf37fa420aef2bb881aa55e61cf5f2f", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20180612.txt", + "url": "https://www.openssl.org/news/secadv/20180327.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -8858,16 +8189,12 @@ "reference_type": "" } ], - "date_published": 
"2018-06-12T00:00:00+00:00", + "date_published": "2018-03-27T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "f62f0a22bd4695353076d3dc1b2e7670ed0bd9607d774a7cc31c86086cacb015", - "aliases": [ - "CVE-2018-0733", - "VC-OPENSSL-20180327-CVE-2018-0733" - ], - "summary": "Because of an implementation bug the PA-RISC CRYPTO_memcmp function is effectively reduced to only comparing the least significant bit of each byte. This allows an attacker to forge messages that would be considered as authenticated in an amount of tries lower than that guaranteed by the security claims of the scheme. The module can only be compiled by the HP-UX assembler, so that only HP-UX PA-RISC targets are affected.", + "unique_content_id": "fd56a1d08c404d18a2425bde4a2cc222", + "summary": "Constructed ASN.1 types with a recursive definition (such as can be found in PKCS7) could eventually exceed the stack given malicious input with excessive recursion. This could result in a Denial Of Service attack. There are no such structures used within SSL/TLS that come from untrusted sources so this is considered safe.", "affected_packages": [ { "package": { @@ -8880,17 +8207,35 @@ }, "fixed_version": "1.1.0h", "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2o", + "affected_version_range": "vers:openssl/1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0733", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0739", "severities": [], - "reference_id": "CVE-2018-0733", + "reference_id": "CVE-2018-0739", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/56d5a4bfcaf37fa420aef2bb881aa55e61cf5f2f", + "url": 
"https://github.com/openssl/openssl/commit/2ac4c6f7b2b2af20c0e2b0ba05367e454cd11b33", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/9310d45087ae546e27e61ddf8f6367f29848220d", "severities": [], "reference_id": "", "reference_type": "" @@ -8912,12 +8257,8 @@ "weaknesses": [] }, { - "unique_content_id": "d2c4e2cf5d78c3a480feea4e1721e0acbb60155c70d8b6a30a282b546f09afcf", - "aliases": [ - "CVE-2018-0734", - "VC-OPENSSL-20181030-CVE-2018-0734" - ], - "summary": "The OpenSSL DSA signature algorithm has been shown to be vulnerable to a timing side channel attack. An attacker could use variations in the signing algorithm to recover the private key.", + "unique_content_id": "5ce5c73a388c1721baa86dd346bc5cca", + "summary": "The OpenSSL RSA Key generation algorithm has been shown to be vulnerable to a cache timing side channel attack. An attacker with sufficient access to mount cache timing attacks during the RSA key generation process could recover the private key.", "affected_packages": [ { "package": { @@ -8928,20 +8269,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.1a", - "affected_version_range": "vers:openssl/1.1.1" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.1.0j", - "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g|1.1.0h|1.1.0i" + "fixed_version": "1.1.0i", + "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g|1.1.0h" }, { "package": { @@ -8952,37 +8281,31 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2q", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p" + "fixed_version": "1.0.2p", + "affected_version_range": 
"vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0734", - "severities": [], - "reference_id": "CVE-2018-0734", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/8abfe72e8c1de1b95f50aa0d9134803b4d00070f", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0737", "severities": [], - "reference_id": "", + "reference_id": "CVE-2018-0737", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/ef11e19d1365eea2b1851e6f540a0bf365d303e7", + "url": "https://github.com/openssl/openssl/commit/6939eab03a6e23d2bd2c3f5e34fe1d48e542e787", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/43e6a58d4991a451daf4891ff05a48735df871ac", + "url": "https://github.com/openssl/openssl/commit/349a41da1ad88ad87825414752a8ff5fdd6a6c3f", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20181030.txt", + "url": "https://www.openssl.org/news/secadv/20180416.txt", "severities": [ { "value": "Low", @@ -8994,16 +8317,12 @@ "reference_type": "" } ], - "date_published": "2018-10-30T00:00:00+00:00", + "date_published": "2018-04-16T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "b208d67bce0a078a253edbd6b6f817f83f3c7e0f384dae57fd11f43aa6645a78", - "aliases": [ - "CVE-2018-0735", - "VC-OPENSSL-20181029-CVE-2018-0735" - ], - "summary": "The OpenSSL ECDSA signature algorithm has been shown to be vulnerable to a timing side channel attack. 
An attacker could use variations in the signing algorithm to recover the private key.", + "unique_content_id": "891a444705c4d9e9d6d9514e6152b93d", + "summary": "During key agreement in a TLS handshake using a DH(E) based ciphersuite a malicious server can send a very large prime value to the client. This will cause the client to spend an unreasonably long period of time generating a key for this prime resulting in a hang until the client has finished. This could be exploited in a Denial Of Service attack.", "affected_packages": [ { "package": { @@ -9014,8 +8333,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.1a", - "affected_version_range": "vers:openssl/1.1.1" + "fixed_version": "1.1.0i", + "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g|1.1.0h" }, { "package": { @@ -9026,31 +8345,31 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.0j", - "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g|1.1.0h|1.1.0i" + "fixed_version": "1.0.2p", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0735", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0732", "severities": [], - "reference_id": "CVE-2018-0735", + "reference_id": "CVE-2018-0732", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/56fb454d281a023b3f950d969693553d3f3ceea1", + "url": "https://github.com/openssl/openssl/commit/ea7abeeabf92b7aca160bdd0208636d4da69f4f4", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/b1d6d55ece1c26fa2829e2b819b038d7b6d692b4", + "url": "https://github.com/openssl/openssl/commit/3984ef0b72831da8b3ece4745cac4f8575b19098", "severities": [], 
"reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20181029.txt", + "url": "https://www.openssl.org/news/secadv/20180612.txt", "severities": [ { "value": "Low", @@ -9062,16 +8381,12 @@ "reference_type": "" } ], - "date_published": "2018-10-29T00:00:00+00:00", + "date_published": "2018-06-12T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "fdffac35f130aaa543d59035f255119efd80363f868a8aac69b3b5036d4e9052", - "aliases": [ - "CVE-2018-0737", - "VC-OPENSSL-20180416-CVE-2018-0737" - ], - "summary": "The OpenSSL RSA Key generation algorithm has been shown to be vulnerable to a cache timing side channel attack. An attacker with sufficient access to mount cache timing attacks during the RSA key generation process could recover the private key.", + "unique_content_id": "3193861b88f934ec25c275d622932dc2", + "summary": "The OpenSSL ECDSA signature algorithm has been shown to be vulnerable to a timing side channel attack. An attacker could use variations in the signing algorithm to recover the private key.", "affected_packages": [ { "package": { @@ -9082,8 +8397,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.0i", - "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g|1.1.0h" + "fixed_version": "1.1.1a", + "affected_version_range": "vers:openssl/1.1.1" }, { "package": { @@ -9094,31 +8409,31 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2p", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o" + "fixed_version": "1.1.0j", + "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g|1.1.0h|1.1.0i" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0737", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0735", "severities": [], - "reference_id": "CVE-2018-0737", + "reference_id": 
"CVE-2018-0735", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/6939eab03a6e23d2bd2c3f5e34fe1d48e542e787", + "url": "https://github.com/openssl/openssl/commit/56fb454d281a023b3f950d969693553d3f3ceea1", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/349a41da1ad88ad87825414752a8ff5fdd6a6c3f", + "url": "https://github.com/openssl/openssl/commit/b1d6d55ece1c26fa2829e2b819b038d7b6d692b4", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20180416.txt", + "url": "https://www.openssl.org/news/secadv/20181029.txt", "severities": [ { "value": "Low", @@ -9130,16 +8445,12 @@ "reference_type": "" } ], - "date_published": "2018-04-16T00:00:00+00:00", + "date_published": "2018-10-29T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "40730ed1276c0a934bcd453d832b1b05ea61d1aeddf8d2a88ed31efc6625e1c9", - "aliases": [ - "CVE-2018-0739", - "VC-OPENSSL-20180327-CVE-2018-0739" - ], - "summary": "Constructed ASN.1 types with a recursive definition (such as can be found in PKCS7) could eventually exceed the stack given malicious input with excessive recursion. This could result in a Denial Of Service attack. There are no such structures used within SSL/TLS that come from untrusted sources so this is considered safe.", + "unique_content_id": "c6585613e6f674c7ea39eefc5057e85d", + "summary": "The OpenSSL DSA signature algorithm has been shown to be vulnerable to a timing side channel attack. 
An attacker could use variations in the signing algorithm to recover the private key.", "affected_packages": [ { "package": { @@ -9150,8 +8461,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.0h", - "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g" + "fixed_version": "1.1.1a", + "affected_version_range": "vers:openssl/1.1.1" }, { "package": { @@ -9162,34 +8473,52 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2o", - "affected_version_range": "vers:openssl/1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n" + "fixed_version": "1.1.0j", + "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g|1.1.0h|1.1.0i" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2q", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0739", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0734", "severities": [], - "reference_id": "CVE-2018-0739", + "reference_id": "CVE-2018-0734", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/2ac4c6f7b2b2af20c0e2b0ba05367e454cd11b33", + "url": "https://github.com/openssl/openssl/commit/8abfe72e8c1de1b95f50aa0d9134803b4d00070f", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/9310d45087ae546e27e61ddf8f6367f29848220d", + "url": "https://github.com/openssl/openssl/commit/ef11e19d1365eea2b1851e6f540a0bf365d303e7", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20180327.txt", + "url": 
"https://github.com/openssl/openssl/commit/43e6a58d4991a451daf4891ff05a48735df871ac", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20181030.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -9198,15 +8527,11 @@ "reference_type": "" } ], - "date_published": "2018-03-27T00:00:00+00:00", + "date_published": "2018-10-30T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "dd129503db8d87d87f40d36a21b3e7ad7a51515303ca1ddff0a7722bf6b6b809", - "aliases": [ - "CVE-2018-5407", - "VC-OPENSSL-20181102-CVE-2018-5407" - ], + "unique_content_id": "a86eaada3e2c85065180d5d7eb1d3a31", "summary": "OpenSSL ECC scalar multiplication, used in e.g. ECDSA and ECDH, has been shown to be vulnerable to a microarchitecture timing side channel attack. An attacker with sufficient access to mount local timing attacks during ECDSA signature generation could recover the private key.", "affected_packages": [ { @@ -9270,11 +8595,53 @@ "weaknesses": [] }, { - "unique_content_id": "a182ef84f10d8869b39936326cf01831942571fde976d293e8cbb7f9182371de", - "aliases": [ - "CVE-2019-1543", - "VC-OPENSSL-20190306-CVE-2019-1543" + "unique_content_id": "bd17aac4dde8bee4fba0c673c8287082", + "summary": "If an application encounters a fatal protocol error and then calls SSL_shutdown() twice (once to send a close_notify, and once to receive one) then OpenSSL can respond differently to the calling application if a 0 byte record is received with invalid padding compared to if a 0 byte record is received with an invalid MAC. If the application then behaves differently based on that in a way that is detectable to the remote peer, then this amounts to a padding oracle that could be used to decrypt data. In order for this to be exploitable \"non-stitched\" ciphersuites must be in use. 
Stitched ciphersuites are optimised implementations of certain commonly used ciphersuites. Also the application must call SSL_shutdown() twice even if a protocol error has occurred (applications should not do this but some do anyway). AEAD ciphersuites are not impacted.", + "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2r", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p|1.0.2q" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1559", + "severities": [], + "reference_id": "CVE-2019-1559", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/e9bbefbf0f24c57645e7ad6a5a71ae649d18ac8e", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20190226.txt", + "severities": [ + { + "value": "Moderate", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } ], + "date_published": "2019-02-26T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "939439dfee2c7c3ef79f3f7fa3e5f90b", "summary": "ChaCha20-Poly1305 is an AEAD cipher, and requires a unique nonce input for every encryption operation. RFC 7539 specifies that the nonce value (IV) should be 96 bits (12 bytes). OpenSSL allows a variable nonce length and front pads the nonce with 0 bytes if it is less than 12 bytes. However it also incorrectly allows a nonce to be set of up to 16 bytes. In this case only the last 12 bytes are significant and any additional leading bytes are ignored. It is a requirement of using this cipher that nonce values are unique. 
Messages encrypted using a reused nonce value are susceptible to serious confidentiality and integrity attacks. If an application changes the default nonce length to be longer than 12 bytes and then makes a change to the leading bytes of the nonce expecting the new value to be a new unique nonce then such an application could inadvertently encrypt messages with a reused nonce. Additionally the ignored bytes in a long nonce are not covered by the integrity guarantee of this cipher. Any application that relies on the integrity of these ignored leading bytes of a long nonce may be further affected. Any OpenSSL internal use of this cipher, including in SSL/TLS, is safe because no such use sets such a long nonce value. However user applications that use this cipher directly and set a non-default nonce length to be longer than 12 bytes may be vulnerable. OpenSSL versions 1.1.1 and 1.1.0 are affected by this issue. Due to the limited scope of affected deployments this has been assessed as low severity and therefore we are not creating new releases at this time.", "affected_packages": [ { @@ -9338,12 +8705,8 @@ "weaknesses": [] }, { - "unique_content_id": "1608445a20cee1c7f70bf4d4567f869870a5bda078ae3054db819f7197868284", - "aliases": [ - "CVE-2019-1547", - "VC-OPENSSL-20190910-CVE-2019-1547" - ], - "summary": "Normally in OpenSSL EC groups always have a co-factor present and this is used in side channel resistant code paths. However, in some cases, it is possible to construct a group using explicit parameters (instead of using a named curve). In those cases it is possible that such a group does not have the cofactor present. This can occur even where all the parameters match a known named curve. If such a curve is used then OpenSSL falls back to non-side channel resistant code paths which may result in full key recovery during an ECDSA signature operation. 
In order to be vulnerable an attacker would have to have the ability to time the creation of a large number of signatures where explicit parameters with no co-factor present are in use by an application using libcrypto. For the avoidance of doubt libssl is not vulnerable because explicit parameters are never used.", + "unique_content_id": "4213f363ba037058475897c693173044", + "summary": "OpenSSL has internal defaults for a directory tree where it can find a configuration file as well as certificates used for verification in TLS. This directory is most commonly referred to as OPENSSLDIR, and is configurable with the --prefix / --openssldir configuration options. For OpenSSL versions 1.1.0 and 1.1.1, the mingw configuration targets assume that resulting programs and libraries are installed in a Unix-like environment and the default prefix for program installation as well as for OPENSSLDIR should be '/usr/local'. However, mingw programs are Windows programs, and as such, find themselves looking at sub-directories of 'C:/usr/local', which may be world writable, which enables untrusted users to modify OpenSSL's default configuration, insert CA certificates, modify (or even replace) existing engine modules, etc. For OpenSSL 1.0.2, '/usr/local/ssl' is used as default for OPENSSLDIR on all Unix and Windows targets, including Visual C builds. However, some build instructions for the diverse Windows targets on 1.0.2 encourage you to specify your own --prefix. OpenSSL versions 1.1.1, 1.1.0 and 1.0.2 are affected by this issue. 
Due to the limited scope of affected deployments this has been assessed as low severity and therefore we are not creating new releases at this time.", "affected_packages": [ { "package": { @@ -9384,31 +8747,37 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1547", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1552", "severities": [], - "reference_id": "CVE-2019-1547", + "reference_id": "CVE-2019-1552", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/54aa9d51b09d67e90db443f682cface795f5af9e", + "severities": [], + "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/30c22fa8b1d840036b8e203585738df62a03cec8", + "url": "https://github.com/openssl/openssl/commit/e32bc855a81a2d48d215c506bdeb4f598045f7e9", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/7c1709c2da5414f5b6133d00a03fc8c5bf996c7a", + "url": "https://github.com/openssl/openssl/commit/b15a19c148384e73338aa7c5b12652138e35ed28", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/21c856b75d81eff61aa63b4f036bb64a85bf6d46", + "url": "https://github.com/openssl/openssl/commit/d333ebaf9c77332754a9d5e111e2f53e1de54fdd", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20190910.txt", + "url": "https://www.openssl.org/news/secadv/20190730.txt", "severities": [ { "value": "Low", @@ -9420,15 +8789,11 @@ "reference_type": "" } ], - "date_published": "2019-09-10T00:00:00+00:00", + "date_published": "2019-07-30T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "efa3c01bca1f8857f755aac0413f6b448077604f02470f2887ccf96682279dba", - "aliases": [ - "CVE-2019-1549", - "VC-OPENSSL-20190910-CVE-2019-1549" - ], + 
"unique_content_id": "05226413367dc1d93fc68106f47a330c", "summary": "OpenSSL 1.1.1 introduced a rewritten random number generator (RNG). This was intended to include protection in the event of a fork() system call in order to ensure that the parent and child processes did not share the same RNG state. However this protection was not being used in the default case. A partial mitigation for this issue is that the output from a high precision timer is mixed into the RNG state so the likelihood of a parent and child process sharing state is significantly reduced. If an application already calls OPENSSL_init_crypto() explicitly using OPENSSL_INIT_ATFORK then this problem does not occur at all.", "affected_packages": [ { @@ -9474,12 +8839,8 @@ "weaknesses": [] }, { - "unique_content_id": "1386c9f10ab439a308d3b6c4bfa71d7f17de4bb9b041d065029c422a0d559caf", - "aliases": [ - "CVE-2019-1551", - "VC-OPENSSL-20191206-CVE-2019-1551" - ], - "summary": "There is an overflow bug in the x64_64 Montgomery squaring procedure used in exponentiation with 512-bit moduli. No EC algorithms are affected. Analysis suggests that attacks against 2-prime RSA1024, 3-prime RSA1536, and DSA1024 as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH512 are considered just feasible. However, for an attack the target would have to re-use the DH512 private key, which is not recommended anyway. Also applications directly using the low level API BN_mod_exp may be affected if they use BN_FLG_CONSTTIME.", + "unique_content_id": "45ac1a1229fc8b49656c3e6fd99221cd", + "summary": "In situations where an attacker receives automated notification of the success or failure of a decryption attempt an attacker, after sending a very large number of messages to be decrypted, can recover a CMS/PKCS7 transported encryption key or decrypt any RSA encrypted message that was encrypted with the public RSA key, using a Bleichenbacher padding oracle attack. 
Applications are not affected if they use a certificate together with the private RSA key to the CMS_decrypt or PKCS7_decrypt functions to select the correct recipient info to decrypt.", "affected_packages": [ { "package": { @@ -9490,8 +8851,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.1e", - "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c|1.1.1d" + "fixed_version": "1.1.1d", + "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c" }, { "package": { @@ -9502,31 +8863,49 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2u", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p|1.0.2q|1.0.2r|1.0.2s|1.0.2t" + "fixed_version": "1.1.0l", + "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g|1.1.0h|1.1.0i|1.1.0j|1.1.0k" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2t", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p|1.0.2q|1.0.2r|1.0.2s" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1551", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1563", "severities": [], - "reference_id": "CVE-2019-1551", + "reference_id": "CVE-2019-1563", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/419102400a2811582a7a3d4a4e317d72e5ce0a8f", + "url": "https://github.com/openssl/openssl/commit/08229ad838c50f644d7e928e2eef147b4308ad64", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/f1c5eea8a817075d31e43f5876993c6710238c98", + "url": 
"https://github.com/openssl/openssl/commit/631f94db0065c78181ca9ba5546ebc8bb3884b97", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20191206.txt", + "url": "https://github.com/openssl/openssl/commit/e21f8cf78a125cd3c8c0d1a1a6c8bb0b901f893f", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20190910.txt", "severities": [ { "value": "Low", @@ -9538,16 +8917,12 @@ "reference_type": "" } ], - "date_published": "2019-12-06T00:00:00+00:00", + "date_published": "2019-09-10T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "edd85067182fe9c90b55fc43bfb734f907e3209f959fe776fbca8d96c71accb6", - "aliases": [ - "CVE-2019-1552", - "VC-OPENSSL-20190730-CVE-2019-1552" - ], - "summary": "OpenSSL has internal defaults for a directory tree where it can find a configuration file as well as certificates used for verification in TLS. This directory is most commonly referred to as OPENSSLDIR, and is configurable with the --prefix / --openssldir configuration options. For OpenSSL versions 1.1.0 and 1.1.1, the mingw configuration targets assume that resulting programs and libraries are installed in a Unix-like environment and the default prefix for program installation as well as for OPENSSLDIR should be '/usr/local'. However, mingw programs are Windows programs, and as such, find themselves looking at sub-directories of 'C:/usr/local', which may be world writable, which enables untrusted users to modify OpenSSL's default configuration, insert CA certificates, modify (or even replace) existing engine modules, etc. For OpenSSL 1.0.2, '/usr/local/ssl' is used as default for OPENSSLDIR on all Unix and Windows targets, including Visual C builds. However, some build instructions for the diverse Windows targets on 1.0.2 encourage you to specify your own --prefix. 
OpenSSL versions 1.1.1, 1.1.0 and 1.0.2 are affected by this issue. Due to the limited scope of affected deployments this has been assessed as low severity and therefore we are not creating new releases at this time.", + "unique_content_id": "c251f1e3c85429b0daa07cb6ea7d1e67", + "summary": "Normally in OpenSSL EC groups always have a co-factor present and this is used in side channel resistant code paths. However, in some cases, it is possible to construct a group using explicit parameters (instead of using a named curve). In those cases it is possible that such a group does not have the cofactor present. This can occur even where all the parameters match a known named curve. If such a curve is used then OpenSSL falls back to non-side channel resistant code paths which may result in full key recovery during an ECDSA signature operation. In order to be vulnerable an attacker would have to have the ability to time the creation of a large number of signatures where explicit parameters with no co-factor present are in use by an application using libcrypto. 
For the avoidance of doubt libssl is not vulnerable because explicit parameters are never used.", "affected_packages": [ { "package": { @@ -9588,37 +8963,31 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1552", - "severities": [], - "reference_id": "CVE-2019-1552", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/54aa9d51b09d67e90db443f682cface795f5af9e", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1547", "severities": [], - "reference_id": "", + "reference_id": "CVE-2019-1547", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/e32bc855a81a2d48d215c506bdeb4f598045f7e9", + "url": "https://github.com/openssl/openssl/commit/30c22fa8b1d840036b8e203585738df62a03cec8", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/b15a19c148384e73338aa7c5b12652138e35ed28", + "url": "https://github.com/openssl/openssl/commit/7c1709c2da5414f5b6133d00a03fc8c5bf996c7a", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/d333ebaf9c77332754a9d5e111e2f53e1de54fdd", + "url": "https://github.com/openssl/openssl/commit/21c856b75d81eff61aa63b4f036bb64a85bf6d46", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20190730.txt", + "url": "https://www.openssl.org/news/secadv/20190910.txt", "severities": [ { "value": "Low", @@ -9630,66 +8999,12 @@ "reference_type": "" } ], - "date_published": "2019-07-30T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "d2ba9b6bba240765f8121e99e081e43b48475b118a7c16aed6cc5556d5b6be89", - "aliases": [ - "CVE-2019-1559", - "VC-OPENSSL-20190226-CVE-2019-1559" - ], - "summary": "If an application encounters a fatal protocol error and then calls SSL_shutdown() twice 
(once to send a close_notify, and once to receive one) then OpenSSL can respond differently to the calling application if a 0 byte record is received with invalid padding compared to if a 0 byte record is received with an invalid MAC. If the application then behaves differently based on that in a way that is detectable to the remote peer, then this amounts to a padding oracle that could be used to decrypt data. In order for this to be exploitable \"non-stitched\" ciphersuites must be in use. Stitched ciphersuites are optimised implementations of certain commonly used ciphersuites. Also the application must call SSL_shutdown() twice even if a protocol error has occurred (applications should not do this but some do anyway). AEAD ciphersuites are not impacted.", - "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.2r", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p|1.0.2q" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1559", - "severities": [], - "reference_id": "CVE-2019-1559", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/e9bbefbf0f24c57645e7ad6a5a71ae649d18ac8e", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://www.openssl.org/news/secadv/20190226.txt", - "severities": [ - { - "value": "Moderate", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2019-02-26T00:00:00+00:00", + "date_published": "2019-09-10T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "07c966215a883c2032c38139472d5ff371ad61b8affa5e951c49f96438ab07cc", - "aliases": [ - "CVE-2019-1563", - "VC-OPENSSL-20190910-CVE-2019-1563" - 
], - "summary": "In situations where an attacker receives automated notification of the success or failure of a decryption attempt an attacker, after sending a very large number of messages to be decrypted, can recover a CMS/PKCS7 transported encryption key or decrypt any RSA encrypted message that was encrypted with the public RSA key, using a Bleichenbacher padding oracle attack. Applications are not affected if they use a certificate together with the private RSA key to the CMS_decrypt or PKCS7_decrypt functions to select the correct recipient info to decrypt.", + "unique_content_id": "70a045decd4328c7ff88c8a1d969e8c4", + "summary": "There is an overflow bug in the x64_64 Montgomery squaring procedure used in exponentiation with 512-bit moduli. No EC algorithms are affected. Analysis suggests that attacks against 2-prime RSA1024, 3-prime RSA1536, and DSA1024 as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH512 are considered just feasible. However, for an attack the target would have to re-use the DH512 private key, which is not recommended anyway. 
Also applications directly using the low level API BN_mod_exp may be affected if they use BN_FLG_CONSTTIME.", "affected_packages": [ { "package": { @@ -9700,20 +9015,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.1d", - "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.1.0l", - "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g|1.1.0h|1.1.0i|1.1.0j|1.1.0k" + "fixed_version": "1.1.1e", + "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c|1.1.1d" }, { "package": { @@ -9724,37 +9027,31 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2t", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p|1.0.2q|1.0.2r|1.0.2s" + "fixed_version": "1.0.2u", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p|1.0.2q|1.0.2r|1.0.2s|1.0.2t" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1563", - "severities": [], - "reference_id": "CVE-2019-1563", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/08229ad838c50f644d7e928e2eef147b4308ad64", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1551", "severities": [], - "reference_id": "", + "reference_id": "CVE-2019-1551", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/631f94db0065c78181ca9ba5546ebc8bb3884b97", + "url": "https://github.com/openssl/openssl/commit/419102400a2811582a7a3d4a4e317d72e5ce0a8f", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": 
"https://github.com/openssl/openssl/commit/e21f8cf78a125cd3c8c0d1a1a6c8bb0b901f893f", + "url": "https://github.com/openssl/openssl/commit/f1c5eea8a817075d31e43f5876993c6710238c98", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20190910.txt", + "url": "https://www.openssl.org/news/secadv/20191206.txt", "severities": [ { "value": "Low", @@ -9766,15 +9063,11 @@ "reference_type": "" } ], - "date_published": "2019-09-10T00:00:00+00:00", + "date_published": "2019-12-06T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "5657b64f70e97033e61583196c24a7a9e4b643cd241052028cb0a2b764adbe7e", - "aliases": [ - "CVE-2020-1967", - "VC-OPENSSL-20200421-CVE-2020-1967" - ], + "unique_content_id": "afb9d94adcf86f7b0de8aa4f7ff7c6b4", "summary": "Server or client applications that call the SSL_check_chain() function during or after a TLS 1.3 handshake may crash due to a NULL pointer dereference as a result of incorrect handling of the \"signature_algorithms_cert\" TLS extension. The crash occurs if an invalid or unrecognised signature algorithm is received from the peer. This could be exploited by a malicious peer in a Denial of Service attack. OpenSSL version 1.1.1d, 1.1.1e, and 1.1.1f are affected by this issue. This issue did not affect OpenSSL versions prior to 1.1.1d.", "affected_packages": [ { @@ -9820,11 +9113,7 @@ "weaknesses": [] }, { - "unique_content_id": "8291dd784cec9b49787a85058d536e4d4c9a136bdc21bcfb7e975a2c41218195", - "aliases": [ - "CVE-2020-1968", - "VC-OPENSSL-20200909-CVE-2020-1968" - ], + "unique_content_id": "56010436497977628dcea6e96888d450", "summary": "The Raccoon attack exploits a flaw in the TLS specification which can lead to an attacker being able to compute the pre-master secret in connections which have used a Diffie-Hellman (DH) based ciphersuite. 
In such a case this would result in the attacker being able to eavesdrop on all encrypted communications sent over that TLS connection. The attack can only be exploited if an implementation re-uses a DH secret across multiple TLS connections. Note that this issue only impacts DH ciphersuites and not ECDH ciphersuites. This issue affects OpenSSL 1.0.2 which is out of support and no longer receiving public updates. OpenSSL 1.1.1 is not vulnerable to this issue.", "affected_packages": [ { @@ -9864,11 +9153,7 @@ "weaknesses": [] }, { - "unique_content_id": "a5da7dab57b99ce22236cb42d5329a816d2abf2481a6857c5b4ce16acb8b940f", - "aliases": [ - "CVE-2020-1971", - "VC-OPENSSL-20201208-CVE-2020-1971" - ], + "unique_content_id": "87b17158b6ad69a4d8043755547f45ad", "summary": "The X.509 GeneralName type is a generic type for representing different types of names. One of those name types is known as EDIPartyName. OpenSSL provides a function GENERAL_NAME_cmp which compares different instances of a GENERAL_NAME to see if they are equal or not. This function behaves incorrectly when both GENERAL_NAMEs contain an EDIPARTYNAME. A NULL pointer dereference and a crash may occur leading to a possible denial of service attack. OpenSSL itself uses the GENERAL_NAME_cmp function for two purposes: 1) Comparing CRL distribution point names between an available CRL and a CRL distribution point embedded in an X509 certificate 2) When verifying that a timestamp response token signer matches the timestamp authority name (exposed via the API functions TS_RESP_verify_response and TS_RESP_verify_token) If an attacker can control both items being compared then that attacker could trigger a crash. For example if the attacker can trick a client or server into checking a malicious certificate against a malicious CRL then this may occur. Note that some applications automatically download CRLs based on a URL embedded in a certificate. 
This checking happens prior to the signatures on the certificate and CRL being verified. OpenSSL's s_server, s_client and verify tools have support for the \"-crl_download\" option which implements automatic CRL downloading and this attack has been demonstrated to work against those tools. Note that an unrelated bug means that affected versions of OpenSSL cannot parse or construct correct encodings of EDIPARTYNAME. However it is possible to construct a malformed EDIPARTYNAME that OpenSSL's parser will accept and hence trigger this attack. All OpenSSL 1.1.1 and 1.0.2 versions are affected by this issue. Other OpenSSL releases are out of support and have not been checked.", "affected_packages": [ { @@ -9932,13 +9217,21 @@ "weaknesses": [] }, { - "unique_content_id": "d94a89c4d33239b6b8b49b765224bdb2cff846ce52cf8d1bfd59e6401fd406d7", - "aliases": [ - "CVE-2021-23839", - "VC-OPENSSL-20210216-CVE-2021-23839" - ], - "summary": "OpenSSL 1.0.2 supports SSLv2. If a client attempts to negotiate SSLv2 with a server that is configured to support both SSLv2 and more recent SSL and TLS versions then a check is made for a version rollback attack when unpadding an RSA signature. Clients that support SSL or TLS versions greater than SSLv2 are supposed to use a special form of padding. A server that supports greater than SSLv2 is supposed to reject connection attempts from a client where this special form of padding is present, because this indicates that a version rollback has occurred (i.e. both client and server support greater than SSLv2, and yet this is the version that is being requested). The implementation of this padding check inverted the logic so that the connection attempt is accepted if the padding is present, and rejected if it is absent. This means that such as server will accept a connection if a version rollback attack has occurred. Further the server will erroneously reject a connection if a normal SSLv2 connection attempt is made. 
Only OpenSSL 1.0.2 servers from version 1.0.2s to 1.0.2x are affected by this issue. In order to be vulnerable a 1.0.2 server must: 1) have configured SSLv2 support at compile time (this is off by default), 2) have configured SSLv2 support at runtime (this is off by default), 3) have configured SSLv2 ciphersuites (these are not in the default ciphersuite list) OpenSSL 1.1.1 does not have SSLv2 support and therefore is not vulnerable to this issue. The underlying error is in the implementation of the RSA_padding_check_SSLv23() function. This also affects the RSA_SSLV23_PADDING padding mode used by various other functions. Although 1.1.1 does not support SSLv2 the RSA_padding_check_SSLv23() function still exists, as does the RSA_SSLV23_PADDING padding mode. Applications that directly call that function or use that padding mode will encounter this issue. However since there is no support for the SSLv2 protocol in 1.1.1 this is considered a bug and not a security issue in that version. OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j.", + "unique_content_id": "510307f6edf17f0620c4a096bb61df0c", + "summary": "The OpenSSL public API function X509_issuer_and_serial_hash() attempts to create a unique hash value based on the issuer and serial number data contained within an X509 certificate. However it fails to correctly handle any errors that may occur while parsing the issuer field (which might occur if the issuer field is maliciously constructed). This may subsequently result in a NULL pointer deref and a crash leading to a potential denial of service attack. The function X509_issuer_and_serial_hash() is never directly called by OpenSSL itself so applications are only vulnerable if they use this function directly and they use it on certificates that may have been obtained from untrusted sources. 
OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.1.1j", + "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c|1.1.1d|1.1.1e|1.1.1f|1.1.1g|1.1.1h|1.1.1i" + }, { "package": { "name": "openssl", @@ -9949,18 +9242,24 @@ "qualifiers": "" }, "fixed_version": "1.0.2y", - "affected_version_range": "vers:openssl/1.0.2s|1.0.2t|1.0.2u|1.0.2v|1.0.2w|1.0.2x" + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p|1.0.2q|1.0.2r|1.0.2s|1.0.2t|1.0.2u|1.0.2v|1.0.2w|1.0.2x" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23839", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23841", "severities": [], - "reference_id": "CVE-2021-23839", + "reference_id": "CVE-2021-23841", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/30919ab80a478f2d81f2e9acdcca3fa4740cd547", + "url": "https://github.com/openssl/openssl/commit/122a19ab48091c657f7cb1fb3af9fc07bd557bbf", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/8252ee4d90f3f2004d3d0aeeed003ad49c9a7807", "severities": [], "reference_id": "", "reference_type": "" @@ -9969,7 +9268,7 @@ "url": "https://www.openssl.org/news/secadv/20210216.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } 
@@ -9982,11 +9281,7 @@ "weaknesses": [] }, { - "unique_content_id": "37127413ec3efbf57f25327ebbe739c46f14fb0992651a32236c3fc60a12e4a4", - "aliases": [ - "CVE-2021-23840", - "VC-OPENSSL-20210216-CVE-2021-23840" - ], + "unique_content_id": "62778ba1713cdf9851ef92f4d2f46fa7", "summary": "Calls to EVP_CipherUpdate, EVP_EncryptUpdate and EVP_DecryptUpdate may overflow the output length argument in some cases where the input length is close to the maximum permissable length for an integer on the platform. In such cases the return value from the function call will be 1 (indicating success), but the output length value will be negative. This could cause applications to behave incorrectly or crash. OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j.", "affected_packages": [ { @@ -10050,25 +9345,9 @@ "weaknesses": [] }, { - "unique_content_id": "4f747fd9c1e01f00c514c9af30852970db6eb6c9b83462affe737ebd3b893a0d", - "aliases": [ - "CVE-2021-23841", - "VC-OPENSSL-20210216-CVE-2021-23841" - ], - "summary": "The OpenSSL public API function X509_issuer_and_serial_hash() attempts to create a unique hash value based on the issuer and serial number data contained within an X509 certificate. However it fails to correctly handle any errors that may occur while parsing the issuer field (which might occur if the issuer field is maliciously constructed). This may subsequently result in a NULL pointer deref and a crash leading to a potential denial of service attack. 
The function X509_issuer_and_serial_hash() is never directly called by OpenSSL itself so applications are only vulnerable if they use this function directly and they use it on certificates that may have been obtained from untrusted sources. OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j.", + "unique_content_id": "ebbc5ad78a20128d4894106ef368c8f1", + "summary": "OpenSSL 1.0.2 supports SSLv2. If a client attempts to negotiate SSLv2 with a server that is configured to support both SSLv2 and more recent SSL and TLS versions then a check is made for a version rollback attack when unpadding an RSA signature. Clients that support SSL or TLS versions greater than SSLv2 are supposed to use a special form of padding. A server that supports greater than SSLv2 is supposed to reject connection attempts from a client where this special form of padding is present, because this indicates that a version rollback has occurred (i.e. both client and server support greater than SSLv2, and yet this is the version that is being requested). The implementation of this padding check inverted the logic so that the connection attempt is accepted if the padding is present, and rejected if it is absent. This means that such as server will accept a connection if a version rollback attack has occurred. Further the server will erroneously reject a connection if a normal SSLv2 connection attempt is made. Only OpenSSL 1.0.2 servers from version 1.0.2s to 1.0.2x are affected by this issue. 
In order to be vulnerable a 1.0.2 server must: 1) have configured SSLv2 support at compile time (this is off by default), 2) have configured SSLv2 support at runtime (this is off by default), 3) have configured SSLv2 ciphersuites (these are not in the default ciphersuite list) OpenSSL 1.1.1 does not have SSLv2 support and therefore is not vulnerable to this issue. The underlying error is in the implementation of the RSA_padding_check_SSLv23() function. This also affects the RSA_SSLV23_PADDING padding mode used by various other functions. Although 1.1.1 does not support SSLv2 the RSA_padding_check_SSLv23() function still exists, as does the RSA_SSLV23_PADDING padding mode. Applications that directly call that function or use that padding mode will encounter this issue. However since there is no support for the SSLv2 protocol in 1.1.1 this is considered a bug and not a security issue in that version. OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. 
Other users should upgrade to 1.1.1j.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.1.1j", - "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c|1.1.1d|1.1.1e|1.1.1f|1.1.1g|1.1.1h|1.1.1i" - }, { "package": { "name": "openssl", @@ -10079,24 +9358,18 @@ "qualifiers": "" }, "fixed_version": "1.0.2y", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p|1.0.2q|1.0.2r|1.0.2s|1.0.2t|1.0.2u|1.0.2v|1.0.2w|1.0.2x" + "affected_version_range": "vers:openssl/1.0.2s|1.0.2t|1.0.2u|1.0.2v|1.0.2w|1.0.2x" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23841", - "severities": [], - "reference_id": "CVE-2021-23841", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/122a19ab48091c657f7cb1fb3af9fc07bd557bbf", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23839", "severities": [], - "reference_id": "", + "reference_id": "CVE-2021-23839", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/8252ee4d90f3f2004d3d0aeeed003ad49c9a7807", + "url": "https://github.com/openssl/openssl/commit/30919ab80a478f2d81f2e9acdcca3fa4740cd547", "severities": [], "reference_id": "", "reference_type": "" @@ -10105,7 +9378,7 @@ "url": "https://www.openssl.org/news/secadv/20210216.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -10118,12 +9391,8 @@ "weaknesses": [] }, { - "unique_content_id": "7e96ee7be9f83a18c1773a7c46610f55024cfbe0be196a47e2b3ea741ae398e2", - "aliases": [ - "CVE-2021-3449", - "VC-OPENSSL-20210325-CVE-2021-3449" - ], - "summary": "An OpenSSL TLS server may crash if sent a maliciously crafted renegotiation ClientHello message from a 
client. If a TLSv1.2 renegotiation ClientHello omits the signature_algorithms extension (where it was present in the initial ClientHello), but includes a signature_algorithms_cert extension then a NULL pointer dereference will result, leading to a crash and a denial of service attack. A server is only vulnerable if it has TLSv1.2 and renegotiation enabled (which is the default configuration). OpenSSL TLS clients are not impacted by this issue. All OpenSSL 1.1.1 versions are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue.", + "unique_content_id": "8017a45e047c6a8a07ddcef5b019a5a9", + "summary": "The X509_V_FLAG_X509_STRICT flag enables additional security checks of the certificates present in a certificate chain. It is not set by default. Starting from OpenSSL version 1.1.1h a check to disallow certificates in the chain that have explicitly encoded elliptic curve parameters was added as an additional strict check. An error in the implementation of this check meant that the result of a previous check to confirm that certificates in the chain are valid CA certificates was overwritten. This effectively bypasses the check that non-CA certificates must not be able to issue other certificates. If a \"purpose\" has been configured then there is a subsequent opportunity for checks that the certificate is a valid CA. All of the named \"purpose\" values implemented in libcrypto perform this check. Therefore, where a purpose is set the certificate chain will still be rejected even when the strict flag has been used. A purpose is set by default in libssl client and server certificate verification routines, but it can be overridden or removed by an application. 
In order to be affected, an application must explicitly set the X509_V_FLAG_X509_STRICT verification flag and either not set a purpose for the certificate verification or, in the case of TLS client or server applications, override the default purpose. OpenSSL versions 1.1.1h and newer are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue.", "affected_packages": [ { "package": { @@ -10135,18 +9404,18 @@ "qualifiers": "" }, "fixed_version": "1.1.1k", - "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c|1.1.1d|1.1.1e|1.1.1f|1.1.1g|1.1.1h|1.1.1i|1.1.1j" + "affected_version_range": "vers:openssl/1.1.1h|1.1.1i|1.1.1j" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3449", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3450", "severities": [], - "reference_id": "CVE-2021-3449", + "reference_id": "CVE-2021-3450", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/fb9fa6b51defd48157eeb207f52181f735d96148", + "url": "https://github.com/openssl/openssl/commit/2a40b7bc7b94dd7de897a74571e7024f0cf0d63b", "severities": [], "reference_id": "", "reference_type": "" @@ -10168,12 +9437,8 @@ "weaknesses": [] }, { - "unique_content_id": "943c0441bb44156232628b06433f25a6e1d5c4bef1db447845be8bb595d55320", - "aliases": [ - "CVE-2021-3450", - "VC-OPENSSL-20210325-CVE-2021-3450" - ], - "summary": "The X509_V_FLAG_X509_STRICT flag enables additional security checks of the certificates present in a certificate chain. It is not set by default. Starting from OpenSSL version 1.1.1h a check to disallow certificates in the chain that have explicitly encoded elliptic curve parameters was added as an additional strict check. An error in the implementation of this check meant that the result of a previous check to confirm that certificates in the chain are valid CA certificates was overwritten. 
This effectively bypasses the check that non-CA certificates must not be able to issue other certificates. If a \"purpose\" has been configured then there is a subsequent opportunity for checks that the certificate is a valid CA. All of the named \"purpose\" values implemented in libcrypto perform this check. Therefore, where a purpose is set the certificate chain will still be rejected even when the strict flag has been used. A purpose is set by default in libssl client and server certificate verification routines, but it can be overridden or removed by an application. In order to be affected, an application must explicitly set the X509_V_FLAG_X509_STRICT verification flag and either not set a purpose for the certificate verification or, in the case of TLS client or server applications, override the default purpose. OpenSSL versions 1.1.1h and newer are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue.", + "unique_content_id": "b9610772604a38aae37934639b563f2d", + "summary": "An OpenSSL TLS server may crash if sent a maliciously crafted renegotiation ClientHello message from a client. If a TLSv1.2 renegotiation ClientHello omits the signature_algorithms extension (where it was present in the initial ClientHello), but includes a signature_algorithms_cert extension then a NULL pointer dereference will result, leading to a crash and a denial of service attack. A server is only vulnerable if it has TLSv1.2 and renegotiation enabled (which is the default configuration). OpenSSL TLS clients are not impacted by this issue. All OpenSSL 1.1.1 versions are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. 
OpenSSL 1.0.2 is not impacted by this issue.", "affected_packages": [ { "package": { @@ -10185,18 +9450,18 @@ "qualifiers": "" }, "fixed_version": "1.1.1k", - "affected_version_range": "vers:openssl/1.1.1h|1.1.1i|1.1.1j" + "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c|1.1.1d|1.1.1e|1.1.1f|1.1.1g|1.1.1h|1.1.1i|1.1.1j" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3450", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3449", "severities": [], - "reference_id": "CVE-2021-3450", + "reference_id": "CVE-2021-3449", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/2a40b7bc7b94dd7de897a74571e7024f0cf0d63b", + "url": "https://github.com/openssl/openssl/commit/fb9fa6b51defd48157eeb207f52181f735d96148", "severities": [], "reference_id": "", "reference_type": "" @@ -10218,11 +9483,7 @@ "weaknesses": [] }, { - "unique_content_id": "b2e254e7e251e702fd77c5eaf069909ab6e7ddf360fc3ff323ee75dc20566220", - "aliases": [ - "CVE-2021-3711", - "VC-OPENSSL-20210824-CVE-2021-3711" - ], + "unique_content_id": "7c59ebbda08fad46ad3628c58c6e1f4f", "summary": "In order to decrypt SM2 encrypted data an application is expected to call the API function EVP_PKEY_decrypt(). Typically an application will call this function twice. The first time, on entry, the \"out\" parameter can be NULL and, on exit, the \"outlen\" parameter is populated with the buffer size required to hold the decrypted plaintext. The application can then allocate a sufficiently sized buffer and call EVP_PKEY_decrypt() again, but this time passing a non-NULL value for the \"out\" parameter. A bug in the implementation of the SM2 decryption code means that the calculation of the buffer size required to hold the plaintext returned by the first call to EVP_PKEY_decrypt() can be smaller than the actual size required by the second call. 
This can lead to a buffer overflow when EVP_PKEY_decrypt() is called by the application a second time with a buffer that is too small. A malicious attacker who is able present SM2 content for decryption to an application could cause attacker chosen data to overflow the buffer by up to a maximum of 62 bytes altering the contents of other data held after the buffer, possibly changing application behaviour or causing the application to crash. The location of the buffer is application dependent but is typically heap allocated.", "affected_packages": [ { @@ -10268,11 +9529,7 @@ "weaknesses": [] }, { - "unique_content_id": "ac1bc5a0f0673f7e6556dd40ca607825904051f6f1686650f07ba5727dcab9f9", - "aliases": [ - "CVE-2021-3712", - "VC-OPENSSL-20210824-CVE-2021-3712" - ], + "unique_content_id": "97ca2e1d473bc9e2e802285c56f85541", "summary": "ASN.1 strings are represented internally within OpenSSL as an ASN1_STRING structure which contains a buffer holding the string data and a field holding the buffer length. This contrasts with normal C strings which are repesented as a buffer for the string data which is terminated with a NUL (0) byte. Although not a strict requirement, ASN.1 strings that are parsed using OpenSSL's own \"d2i\" functions (and other similar parsing functions) as well as any string whose value has been set with the ASN1_STRING_set() function will additionally NUL terminate the byte array in the ASN1_STRING structure. However, it is possible for applications to directly construct valid ASN1_STRING structures which do not NUL terminate the byte array by directly setting the \"data\" and \"length\" fields in the ASN1_STRING array. This can also happen by using the ASN1_STRING_set0() function. Numerous OpenSSL functions that print ASN.1 data have been found to assume that the ASN1_STRING byte array will be NUL terminated, even though this is not guaranteed for strings that have been directly constructed. 
Where an application requests an ASN.1 structure to be printed, and where that ASN.1 structure contains ASN1_STRINGs that have been directly constructed by the application without NUL terminating the \"data\" field, then a read buffer overrun can occur. The same thing can also occur during name constraints processing of certificates (for example if a certificate has been directly constructed by the application instead of loading it via the OpenSSL parsing functions, and the certificate contains non NUL terminated ASN1_STRING structures). It can also occur in the X509_get1_email(), X509_REQ_get1_email() and X509_get1_ocsp() functions. If a malicious actor can cause an application to directly construct an ASN1_STRING and then process it through one of the affected OpenSSL functions then this issue could be hit. This might result in a crash (causing a Denial of Service attack). It could also result in the disclosure of private memory contents (such as private keys, or sensitive plaintext).", "affected_packages": [ { @@ -10336,11 +9593,7 @@ "weaknesses": [] }, { - "unique_content_id": "2480e0bc015e4765e66637e4b96ea45d8d93d41719e171100ca32011f81d6e80", - "aliases": [ - "CVE-2021-4044", - "VC-OPENSSL-20211214-CVE-2021-4044" - ], + "unique_content_id": "1c5bbe67613cfce3a310b822466ad17e", "summary": "Internally libssl in OpenSSL calls X509_verify_cert() on the client side to verify a certificate supplied by a server. That function may return a negative return value to indicate an internal error (for example out of memory). Such a negative return value is mishandled by OpenSSL and will cause an IO function (such as SSL_connect() or SSL_do_handshake()) to not indicate success and a subsequent call to SSL_get_error() to return the value SSL_ERROR_WANT_RETRY_VERIFY. This return value is only supposed to be returned by OpenSSL if the application has previously called SSL_CTX_set_cert_verify_callback(). 
Since most applications do not do this the SSL_ERROR_WANT_RETRY_VERIFY return value from SSL_get_error() will be totally unexpected and applications may not behave correctly as a result. The exact behaviour will depend on the application but it could result in crashes, infinite loops or other similar incorrect responses. This issue is made more serious in combination with a separate bug in OpenSSL 3.0 that will cause X509_verify_cert() to indicate an internal error when processing a certificate chain. This will occur where a certificate does not include the Subject Alternative Name extension but where a Certificate Authority has enforced name constraints. This issue can occur even with valid chains. By combining the two issues an attacker could induce incorrect, application dependent behaviour.", "affected_packages": [ { @@ -10386,11 +9639,7 @@ "weaknesses": [] }, { - "unique_content_id": "80c2054b079c7d69842fe524fdc6abcf1246a37323a9f29ce4f80f4300e8282f", - "aliases": [ - "CVE-2021-4160", - "VC-OPENSSL-20220128-CVE-2021-4160" - ], + "unique_content_id": "0039548ab133f97e2138bb298ccc7cae", "summary": "There is a carry propagation bug in the MIPS32 and MIPS64 squaring procedure. Many EC algorithms are affected, including some of the TLS 1.3 default curves. Impact was not analyzed in detail, because the pre-requisites for attack are considered unlikely and include reusing private keys. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be significant. However, for an attack on TLS to be meaningful, the server would have to share the DH private key among multiple clients, which is no longer an option since CVE-2016-0701. 
This issue affects OpenSSL versions 1.0.2, 1.1.1 and 3.0.0. It was addressed in the releases of 1.1.1m and 3.0.1 on the 15th of December 2021. For the 1.0.2 release it is addressed in git commit 6fc1aaaf3 that is available to premium support customers only. It will be made available in 1.0.2zc when it is released. The issue only affects OpenSSL on MIPS platforms.", "affected_packages": [ { @@ -10472,11 +9721,7 @@ "weaknesses": [] }, { - "unique_content_id": "dc0cbb60dc9280799a925c566b952d1c952cf5c3b30d9e3d5726c30a815e49d2", - "aliases": [ - "CVE-2022-0778", - "VC-OPENSSL-20220315-CVE-2022-0778" - ], + "unique_content_id": "caa5eb3135dc715346ce3a32211b024e", "summary": "The BN_mod_sqrt() function, which computes a modular square root, contains a bug that can cause it to loop forever for non-prime moduli. Internally this function is used when parsing certificates that contain elliptic curve public keys in compressed form or explicit elliptic curve parameters with a base point encoded in compressed form. It is possible to trigger the infinite loop by crafting a certificate that has invalid explicit curve parameters. Since certificate parsing happens prior to verification of the certificate signature, any process that parses an externally supplied certificate may thus be subject to a denial of service attack. The infinite loop can also be reached when parsing crafted private keys as they can contain explicit elliptic curve parameters. Thus vulnerable situations include: - TLS clients consuming server certificates - TLS servers consuming client certificates - Hosting providers taking certificates or private keys from customers - Certificate authorities parsing certification requests from subscribers - Anything else which parses ASN.1 elliptic curve parameters Also any other applications that use the BN_mod_sqrt() where the attacker can control the parameter values are vulnerable to this DoS issue. 
In the OpenSSL 1.0.2 version the public key is not parsed during initial parsing of the certificate which makes it slightly harder to trigger the infinite loop. However any operation which requires the public key from the certificate will trigger the infinite loop. In particular the attacker can use a self-signed certificate to trigger the loop during verification of the certificate signature. This issue affects OpenSSL versions 1.0.2, 1.1.1 and 3.0. It was addressed in the releases of 1.1.1n and 3.0.2 on the 15th March 2022.", "affected_packages": [ { diff --git a/vulnerabilities/tests/test_example.py b/vulnerabilities/tests/test_example.py index 119a8b61d..dfde6425d 100644 --- a/vulnerabilities/tests/test_example.py +++ b/vulnerabilities/tests/test_example.py @@ -76,7 +76,7 @@ def test_import_framework_using_example_importer(self): ImportRunner(ExampleImporter).run() for expected in mock_fetch_advisory_data(): - assert models.Advisory.objects.get(aliases__contains=expected["id"]) + assert models.Alias.objects.get(alias=expected["id"]).advisories.all() @pytest.mark.django_db(transaction=True) def test_improve_framework_using_example_improver(self): diff --git a/vulnerabilities/tests/test_import_runner.py b/vulnerabilities/tests/test_import_runner.py index 3f88e0963..d46a34861 100644 --- a/vulnerabilities/tests/test_import_runner.py +++ b/vulnerabilities/tests/test_import_runner.py @@ -163,7 +163,10 @@ def test_process_advisories_can_import_advisories_with_severities_and_no_date(): } ad = AdvisoryData.from_dict(advisory) ImportRunner(DummyImporter).process_advisories([ad], "test_importer_date") - advisory_aliases = list(models.Advisory.objects.all().values("aliases")) + advisory_aliases = [ + {"aliases": [item.alias for item in adv.aliases.all()]} + for adv in models.Advisory.objects.all() + ] assert advisory_aliases == [{"aliases": ["CVE-2024-31079"]}] diff --git a/vulnerabilities/tests/test_openssl.py b/vulnerabilities/tests/test_openssl.py index 
0effc9515..90dbbc8b7 100644 --- a/vulnerabilities/tests/test_openssl.py +++ b/vulnerabilities/tests/test_openssl.py @@ -30,7 +30,6 @@ ADVISORY_FIELDS_TO_TEST = ( "unique_content_id", - "aliases", "summary", "affected_packages", "references", diff --git a/vulnerabilities/tests/test_postgres_workaround.py b/vulnerabilities/tests/test_postgres_workaround.py index 3b0f215ab..38943bd1a 100644 --- a/vulnerabilities/tests/test_postgres_workaround.py +++ b/vulnerabilities/tests/test_postgres_workaround.py @@ -23,6 +23,7 @@ from vulnerabilities.importer import Reference from vulnerabilities.importer import VulnerabilitySeverity from vulnerabilities.models import Advisory +from vulnerabilities.pipes.advisory import get_or_create_aliases data = AdvisoryData( aliases=["CVE-2020-8908", "GHSA-5mg8-w23w-74h3"], @@ -424,8 +425,7 @@ @pytest.mark.django_db def test_postgres_workaround_with_many_references_many_affected_packages_and_long_summary(): - Advisory.objects.get_or_create( - aliases=data.aliases, + adv, _ = Advisory.objects.get_or_create( summary=data.summary, affected_packages=[pkg.to_dict() for pkg in data.affected_packages], references=[ref.to_dict() for ref in data.references], @@ -435,3 +435,4 @@ def test_postgres_workaround_with_many_references_many_affected_packages_and_lon "date_collected": datetime.now(tz=timezone.utc), }, ) + adv.aliases.add(*get_or_create_aliases(data.aliases)) diff --git a/vulnerabilities/tests/test_vulnerability_status_improver.py b/vulnerabilities/tests/test_vulnerability_status_improver.py index 89a57c0b7..084df3a2c 100644 --- a/vulnerabilities/tests/test_vulnerability_status_improver.py +++ b/vulnerabilities/tests/test_vulnerability_status_improver.py @@ -13,12 +13,14 @@ import pytest +from vulnerabilities.import_runner import associate_vulnerability_with_aliases from vulnerabilities.improvers.vulnerability_status import VulnerabilityStatusImprover from vulnerabilities.models import Advisory from vulnerabilities.models import Alias from 
vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilityStatusType from vulnerabilities.pipelines.nvd_importer import NVDImporterPipeline +from vulnerabilities.pipes.advisory import get_or_create_aliases BASE_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -31,20 +33,20 @@ @pytest.mark.django_db(transaction=True) def test_interesting_advisories(): - Advisory.objects.create( - aliases=["CVE-1"], + adv1 = Advisory.objects.create( created_by=NVDImporterPipeline.pipeline_id, summary="1", date_collected=datetime.now(), ) - Advisory.objects.create( - aliases=["CVE-1"], + adv1.aliases.add(*get_or_create_aliases(["CVE-1"])) + adv2 = Advisory.objects.create( created_by=NVDImporterPipeline.pipeline_id, summary="2", date_collected=datetime.now(), ) + adv2.aliases.add(*get_or_create_aliases(["CVE-1"])) advs = VulnerabilityStatusImprover().interesting_advisories - assert len(list(advs)) == 1 + assert len(list(advs)) == 2 @pytest.mark.django_db(transaction=True) @@ -53,13 +55,14 @@ def test_improver_end_to_end(mock_response): response = os.path.join(TEST_DATA, "CVE-2023-35866.json") mock_response.return_value = response adv = Advisory.objects.create( - aliases=["CVE-2023-35866"], created_by=NVDImporterPipeline.pipeline_id, summary="1", date_collected=datetime.now(), ) + aliases = get_or_create_aliases(aliases=["CVE-2023-35866"]) + adv.aliases.add(*aliases) v1 = Vulnerability.objects.create(summary="test") - Alias.objects.create(alias="CVE-2023-35866", vulnerability=v1) - VulnerabilityStatusImprover().get_inferences(advisory_data=adv) + associate_vulnerability_with_aliases(aliases=aliases, vulnerability=v1) + VulnerabilityStatusImprover().get_inferences(advisory_data=adv.to_advisory_data()) v1 = Vulnerability.objects.get(summary="test") assert v1.status == VulnerabilityStatusType.DISPUTED From 801f8711f8690ad7efe3a63be0dfe23d59068d39 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 25 Mar 2025 14:33:58 +0530 Subject: [PATCH 
071/545] Remove unused include_metadata parameter Signed-off-by: Keshav Priyadarshi --- vulnerabilities/utils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/vulnerabilities/utils.py b/vulnerabilities/utils.py index 8c777610d..52104b556 100644 --- a/vulnerabilities/utils.py +++ b/vulnerabilities/utils.py @@ -585,12 +585,11 @@ def normalize_list(lst): return sorted(lst) if lst else [] -def compute_content_id(advisory_data, include_metadata=False): +def compute_content_id(advisory_data): """ Compute a unique content_id for an advisory by normalizing its data and hashing it. :param advisory_data: An AdvisoryData object - :param include_metadata: Boolean indicating whether to include `created_by` and `url` :return: SHA-256 hash digest as content_id """ From 05ba61f4721d38f45fd8381c56d77ddc3a05d72a Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 25 Mar 2025 14:39:46 +0530 Subject: [PATCH 072/545] Make unique_content_id required field - Also validate fields before saving Signed-off-by: Keshav Priyadarshi --- .../migrations/0090_migrate_advisory_aliases.py | 11 +++++++++++ vulnerabilities/models.py | 8 ++++---- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/vulnerabilities/migrations/0090_migrate_advisory_aliases.py b/vulnerabilities/migrations/0090_migrate_advisory_aliases.py index c0e03295a..2734d2bec 100644 --- a/vulnerabilities/migrations/0090_migrate_advisory_aliases.py +++ b/vulnerabilities/migrations/0090_migrate_advisory_aliases.py @@ -86,6 +86,17 @@ def reverse_populate_new_advisory_aliases_field(apps, schema_editor): ) operations = [ + migrations.AlterField( + model_name="advisory", + name="unique_content_id", + field=models.CharField( + help_text="A 64 character unique identifier for the content of the advisory since we use sha256 as hex", + max_length=64, + blank=False, + null=False, + ), + ), + # Make vulnerability relation optional migrations.AlterField( model_name="alias", diff --git 
a/vulnerabilities/models.py b/vulnerabilities/models.py index 18aa6ba14..4085c5171 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1319,7 +1319,8 @@ class Advisory(models.Model): unique_content_id = models.CharField( max_length=64, - blank=True, + blank=False, + null=False, help_text="A 64 character unique identifier for the content of the advisory since we use sha256 as hex", ) aliases = models.ManyToManyField( @@ -1364,9 +1365,8 @@ class Meta: ordering = ["date_published", "unique_content_id"] def save(self, *args, **kwargs): - advisory_data = self.to_advisory_data() - self.unique_content_id = compute_content_id(advisory_data, include_metadata=False) - super().save(*args, **kwargs) + self.full_clean() + return super().save(*args, **kwargs) def to_advisory_data(self) -> "AdvisoryData": from vulnerabilities.importer import AdvisoryData From 996da1337ee4235b8ec5d68ad81117d7176fcee8 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 25 Mar 2025 17:06:11 +0530 Subject: [PATCH 073/545] Update tests to support latest advisory model changes Signed-off-by: Keshav Priyadarshi --- .../tests/pipelines/test_base_pipeline.py | 14 +- .../test_remove_duplicate_advisories.py | 17 +- vulnerabilities/tests/pipes/test_advisory.py | 15 +- vulnerabilities/tests/test_add_cvsssv31.py | 2 + vulnerabilities/tests/test_changelog.py | 4 + .../tests/test_compute_content_id.py | 20 - ...security_advisories-importer-expected.json | 1262 ++++----- ...security_advisories-importer-expected.json | 2294 ++++++++--------- vulnerabilities/tests/test_data_migrations.py | 1 - vulnerabilities/tests/test_import_runner.py | 7 +- vulnerabilities/tests/test_improve_runner.py | 11 +- .../tests/test_postgres_workaround.py | 1 + .../test_vulnerability_status_improver.py | 3 + 13 files changed, 1824 insertions(+), 1827 deletions(-) diff --git a/vulnerabilities/tests/pipelines/test_base_pipeline.py b/vulnerabilities/tests/pipelines/test_base_pipeline.py index 
7d16315ad..7c7319e5d 100644 --- a/vulnerabilities/tests/pipelines/test_base_pipeline.py +++ b/vulnerabilities/tests/pipelines/test_base_pipeline.py @@ -39,16 +39,12 @@ def get_advisory1(created_by="test_pipeline"): - adv = models.Advisory.objects.create( - summary=advisory_data1.summary, - affected_packages=[pkg.to_dict() for pkg in advisory_data1.affected_packages], - references=[ref.to_dict() for ref in advisory_data1.references], - url=advisory_data1.url, - created_by=created_by, - date_collected=timezone.now(), + from vulnerabilities.pipes.advisory import insert_advisory + + return insert_advisory( + advisory=advisory_data1, + pipeline_id=created_by, ) - adv.aliases.add(*get_or_create_aliases(advisory_data1.aliases)) - return adv class TestVulnerableCodePipeline(TestCase): diff --git a/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py b/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py index 89187d488..d6cd5b5d7 100644 --- a/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py +++ b/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py @@ -39,6 +39,8 @@ def test_remove_duplicates_keeps_oldest(self): Test that when multiple advisories have the same content, only the oldest one is kept. 
""" + from vulnerabilities.utils import compute_content_id + # Create three advisories with same content but different dates dates = [ datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), @@ -49,14 +51,15 @@ def test_remove_duplicates_keeps_oldest(self): advisories = [] for date in dates: advisory = Advisory.objects.create( + unique_content_id=compute_content_id(advisory_data=self.advisory_data), summary=self.advisory_data.summary, affected_packages=[pkg.to_dict() for pkg in self.advisory_data.affected_packages], references=[ref.to_dict() for ref in self.advisory_data.references], date_imported=date, date_collected=date, + created_by="test_pipeline", ) advisories.append(advisory) - print(advisory.id) # Run the pipeline pipeline = RemoveDuplicateAdvisoriesPipeline() @@ -73,19 +76,23 @@ def test_different_content_preserved(self): """ # Create two advisories with different content advisory1 = Advisory.objects.create( + unique_content_id="test-id1", summary="Summary 1", affected_packages=[], date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), references=[], date_imported=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + created_by="test_pipeline", ) advisory2 = Advisory.objects.create( + unique_content_id="test-id2", summary="Summary 2", affected_packages=[], references=[], date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), date_imported=datetime.datetime(2024, 1, 2, tzinfo=pytz.UTC), + created_by="test_pipeline", ) # Run the pipeline @@ -99,13 +106,16 @@ def test_recompute_content_ids(self): """ Test that advisories without content IDs get them updated. 
""" + from vulnerabilities.utils import compute_content_id + # Create advisory without content ID advisory = Advisory.objects.create( + unique_content_id="incorrect-content-id", summary=self.advisory_data.summary, affected_packages=[pkg.to_dict() for pkg in self.advisory_data.affected_packages], references=[ref.to_dict() for ref in self.advisory_data.references], - unique_content_id="", date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + created_by="test_pipeline", ) # Run the pipeline @@ -114,4 +124,5 @@ def test_recompute_content_ids(self): # Check that content ID was updated advisory.refresh_from_db() - self.assertNotEqual(advisory.unique_content_id, "") + expected_content_id = compute_content_id(advisory_data=self.advisory_data) + self.assertNotEqual(advisory.unique_content_id, expected_content_id) diff --git a/vulnerabilities/tests/pipes/test_advisory.py b/vulnerabilities/tests/pipes/test_advisory.py index 6135fbf59..84b44a5d0 100644 --- a/vulnerabilities/tests/pipes/test_advisory.py +++ b/vulnerabilities/tests/pipes/test_advisory.py @@ -18,6 +18,7 @@ from vulnerabilities.importer import Reference from vulnerabilities.pipes.advisory import get_or_create_aliases from vulnerabilities.pipes.advisory import import_advisory +from vulnerabilities.utils import compute_content_id advisory_data1 = AdvisoryData( summary="vulnerability description here", @@ -34,16 +35,12 @@ def get_advisory1(created_by="test_pipeline"): - advisory = models.Advisory.objects.create( - summary=advisory_data1.summary, - affected_packages=[pkg.to_dict() for pkg in advisory_data1.affected_packages], - references=[ref.to_dict() for ref in advisory_data1.references], - url=advisory_data1.url, - created_by=created_by, - date_collected=timezone.now(), + from vulnerabilities.pipes.advisory import insert_advisory + + return insert_advisory( + advisory=advisory_data1, + pipeline_id=created_by, ) - advisory.aliases.add(*get_or_create_aliases(advisory_data1.aliases)) - return advisory def 
get_all_vulnerability_relationships_objects(): diff --git a/vulnerabilities/tests/test_add_cvsssv31.py b/vulnerabilities/tests/test_add_cvsssv31.py index 96c2abf86..6b1c1875a 100644 --- a/vulnerabilities/tests/test_add_cvsssv31.py +++ b/vulnerabilities/tests/test_add_cvsssv31.py @@ -24,6 +24,7 @@ def setUp(self): self.pipeline = CVEAdvisoryMappingPipeline() advisory = Advisory.objects.create( created_by="nvd_importer", + unique_content_id="test-unique-content-id", references=[ { "severities": [ @@ -41,6 +42,7 @@ def setUp(self): date_collected="2024-09-27T19:38:00Z", ) advisory.aliases.add(*get_or_create_aliases(["CVE-2024-1234"])) + vuln = Vulnerability.objects.create(vulnerability_id="CVE-2024-1234") sev = VulnerabilitySeverity.objects.create( scoring_system=CVSSV3.identifier, diff --git a/vulnerabilities/tests/test_changelog.py b/vulnerabilities/tests/test_changelog.py index 3f762ac5e..fce5be00f 100644 --- a/vulnerabilities/tests/test_changelog.py +++ b/vulnerabilities/tests/test_changelog.py @@ -25,6 +25,7 @@ def test_package_changelog(): pkg, _ = models.Package.objects.get_or_create_from_purl("pkg:npm/foo@1.0.0") assert models.PackageChangeLog.objects.filter(package=pkg).count() == 0 adv = models.Advisory.objects.create( + unique_content_id="test-unique-content-id1", created_by=NpmImporterPipeline.pipeline_id, summary="TEST", date_collected=datetime.now(), @@ -53,6 +54,7 @@ def test_package_changelog(): pkg1, _ = models.Package.objects.get_or_create_from_purl("pkg:npm/foo@2.0.0") assert models.PackageChangeLog.objects.filter(package=pkg1).count() == 0 adv = models.Advisory.objects.create( + unique_content_id="test-unique-content-id2", created_by=NpmImporterPipeline.pipeline_id, summary="TEST-1", date_collected=datetime.now(), @@ -84,6 +86,7 @@ def test_package_changelog(): @pytest.mark.django_db def test_vulnerability_changelog(): adv = models.Advisory.objects.create( + unique_content_id="test-unique-content-id3", created_by=NpmImporterPipeline.pipeline_id, 
summary="TEST_1", date_collected=datetime.now(), @@ -117,6 +120,7 @@ def test_vulnerability_changelog(): @pytest.mark.django_db def test_vulnerability_changelog_software_version(): adv = models.Advisory.objects.create( + unique_content_id="test-unique-content-id4", created_by=NpmImporterPipeline.pipeline_id, summary="TEST_1", date_collected=datetime.now(), diff --git a/vulnerabilities/tests/test_compute_content_id.py b/vulnerabilities/tests/test_compute_content_id.py index 87fe9e9f0..3e718b1af 100644 --- a/vulnerabilities/tests/test_compute_content_id.py +++ b/vulnerabilities/tests/test_compute_content_id.py @@ -85,26 +85,6 @@ def test_different_metadata_same_content_same_id(self): assert compute_content_id(advisory1) == compute_content_id(advisory2) - def test_different_metadata_different_id_when_included(self): - """ - Test that advisories with same content but different metadata have different content IDs - when include_metadata=True - """ - advisory1 = self.base_advisory - - advisory2 = AdvisoryData( - summary="Test summary", - affected_packages=self.base_advisory.affected_packages, - references=self.base_advisory.references, - date_published=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), - url="https://different.url", - ) - - self.assertNotEqual( - compute_content_id(advisory1, include_metadata=True), - compute_content_id(advisory2, include_metadata=True), - ) - def test_different_summary_different_id(self): """ Test that advisories with different summaries have different content IDs diff --git a/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json b/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json index 047cb209e..9e760590f 100644 --- a/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json +++ b/vulnerabilities/tests/test_data/nginx/security_advisories-importer-expected.json @@ -1,7 +1,7 @@ [ { - "unique_content_id": "04f5bc12ff49a95a29c459222379abe4", - "summary": "NULL 
pointer dereference while writing client request body", + "unique_content_id": "041e081a630681e36df17fc2471cd58a789dce20b54dce62c66900baceb7d771", + "summary": "Stack overflow and use-after-free in HTTP/3", "affected_packages": [ { "package": { @@ -12,8 +12,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.11.1", - "affected_version_range": "vers:nginx/>=1.3.9|<=1.11.0" + "fixed_version": "1.27.0", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" }, { "package": { @@ -24,13 +24,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.10.1", - "affected_version_range": "vers:nginx/>=1.3.9|<=1.11.0" + "fixed_version": "1.26.1", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000179.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", "severities": [ { "value": "medium", @@ -42,33 +42,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-4450", - "severities": [], - "reference_id": "CVE-2016-4450", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2016.write.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2016.write.txt.asc", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2016.write2.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2016.write2.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-31079", "severities": [], - "reference_id": "", + "reference_id": "CVE-2024-31079", "reference_type": "" } ], @@ -76,8 +52,8 @@ "weaknesses": [] }, { - "unique_content_id": "0f21f4e3d88f4af06f0c46d096e90320", - "summary": "Request line parsing vulnerability", + "unique_content_id": 
"044f1ec3ed59bdbafada7e40b37f7a3cbd0afc31c67aac002251f7ed56e756db", + "summary": "Vulnerabilities with Windows directory aliases", "affected_packages": [ { "package": { @@ -86,10 +62,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "1.5.7", - "affected_version_range": "vers:nginx/>=0.8.41|<=1.5.6" + "fixed_version": "1.3.1", + "affected_version_range": "vers:nginx/>=0.7.52|<=1.3.0" }, { "package": { @@ -98,15 +74,15 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "1.4.4", - "affected_version_range": "vers:nginx/>=0.8.41|<=1.5.6" + "fixed_version": "1.2.1", + "affected_version_range": "vers:nginx/>=0.7.52|<=1.3.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000125.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2012/000086.html", "severities": [ { "value": "medium", @@ -118,21 +94,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-4547", - "severities": [], - "reference_id": "CVE-2013-4547", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2013.space.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2013.space.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4963", "severities": [], - "reference_id": "", + "reference_id": "CVE-2011-4963", "reference_type": "" } ], @@ -140,8 +104,8 @@ "weaknesses": [] }, { - "unique_content_id": "13592aaee15657bff9afca8c98edf8bf", - "summary": "Memory disclosure with specially crafted HTTP backend responses", + "unique_content_id": "04ec1beb69b3712ef90b5975ff13d5d9ece8dc4c31e2fbd033e1e7be98f889ed", + "summary": "SPDY heap buffer overflow", "affected_packages": [ { "package": { @@ -152,20 +116,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.5.0", - 
"affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" - }, - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.4.1", - "affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" + "fixed_version": "1.5.12", + "affected_version_range": "vers:nginx/>=1.3.15|<=1.5.11" }, { "package": { @@ -176,51 +128,61 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.2.9", - "affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" + "fixed_version": "1.4.7", + "affected_version_range": "vers:nginx/>=1.3.15|<=1.5.11" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000114.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000135.html", + "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-2070", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0133", "severities": [], - "reference_id": "CVE-2013-2070", + "reference_id": "CVE-2014-0133", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2013.chunked.txt", + "url": "https://nginx.org/download/patch.2014.spdy2.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2013.chunked.txt.asc", + "url": "https://nginx.org/download/patch.2014.spdy2.txt.asc", "severities": [], "reference_id": "", "reference_type": "" - }, + } + ], + "date_published": null, + "weaknesses": [] + }, + { + "unique_content_id": "1000911200f3a7046464251c86a45451e6d049b88cb3e5edc6d009a1867418f7", + "summary": "An error log data are not sanitized", + "affected_packages": [ { - "url": "https://nginx.org/download/patch.2013.proxy.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, + 
"package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": null, + "affected_version_range": "vers:nginx/*" + } + ], + "references": [ { - "url": "https://nginx.org/download/patch.2013.proxy.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-4487", "severities": [], - "reference_id": "", + "reference_id": "CVE-2009-4487", "reference_type": "" } ], @@ -228,8 +190,8 @@ "weaknesses": [] }, { - "unique_content_id": "2537fa6a9e8e84a3c06bb122fcbf468d", - "summary": "Excessive memory usage in HTTP/2 with zero length headers", + "unique_content_id": "37a3e3a4d916420d151462c0e761db15f3dfb81ead3e3fa18e84ef4a93151d4c", + "summary": "Excessive CPU usage in HTTP/2", "affected_packages": [ { "package": { @@ -240,8 +202,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.17.3", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.15.6", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" }, { "package": { @@ -252,13 +214,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.16.1", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.14.1", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000220.html", "severities": [ { "value": "low", @@ -270,9 +232,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9516", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16844", "severities": [], - "reference_id": "CVE-2019-9516", + "reference_id": "CVE-2018-16844", "reference_type": "" } ], @@ -280,8 +242,8 @@ "weaknesses": [] }, { - "unique_content_id": "27612bc7cab82114b1549552f5ad48ff", - "summary": "1-byte memory overwrite in resolver", + "unique_content_id": 
"3db919e67e7061f392f575e7ac88884850c686c133ebdd4f58dfddb6196e15bf", + "summary": "NULL pointer dereference while writing client request body", "affected_packages": [ { "package": { @@ -292,8 +254,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.21.0", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.20.0" + "fixed_version": "1.11.1", + "affected_version_range": "vers:nginx/>=1.3.9|<=1.11.0" }, { "package": { @@ -304,13 +266,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.20.1", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.20.0" + "fixed_version": "1.10.1", + "affected_version_range": "vers:nginx/>=1.3.9|<=1.11.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2021/000300.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000179.html", "severities": [ { "value": "medium", @@ -322,19 +284,31 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23017", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-4450", "severities": [], - "reference_id": "CVE-2021-23017", + "reference_id": "CVE-2016-4450", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2021.resolver.txt", + "url": "https://nginx.org/download/patch.2016.write.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2021.resolver.txt.asc", + "url": "https://nginx.org/download/patch.2016.write.txt.asc", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2016.write2.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2016.write2.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -344,8 +318,8 @@ "weaknesses": [] }, { - "unique_content_id": "2ec9de991e2cb7a5a0ba79bed8556a41", - "summary": "Use-after-free during CNAME response processing in resolver", + 
"unique_content_id": "3f9a96e88c2c8cb3ad5852621091d686b420e0fa25921a9f10f330e02e7f47d6", + "summary": "Insufficient limits of CNAME resolution in resolver", "affected_packages": [ { "package": { @@ -386,9 +360,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0746", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0747", "severities": [], - "reference_id": "CVE-2016-0746", + "reference_id": "CVE-2016-0747", "reference_type": "" } ], @@ -396,8 +370,8 @@ "weaknesses": [] }, { - "unique_content_id": "31675b37fe392d1e36b77f7198b1d008", - "summary": "An error log data are not sanitized", + "unique_content_id": "4590b8b17cfdf0314dffd75372ba416fd8ced35cdeb673aabe9d2ed5b19dab3d", + "summary": "Memory disclosure with specially crafted HTTP backend responses", "affected_packages": [ { "package": { @@ -408,25 +382,9 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": null, - "affected_version_range": "vers:nginx/*" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-4487", - "severities": [], - "reference_id": "CVE-2009-4487", - "reference_type": "" - } - ], - "date_published": null, - "weaknesses": [] - }, - { - "unique_content_id": "33d08a513ea5fef861e924f2601f7ac6", - "summary": "Memory disclosure in the ngx_http_mp4_module", - "affected_packages": [ + "fixed_version": "1.5.0", + "affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" + }, { "package": { "name": "nginx", @@ -436,8 +394,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.15.6", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.15.5" + "fixed_version": "1.4.1", + "affected_version_range": "vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" }, { "package": { @@ -448,13 +406,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.14.1", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.15.5" + "fixed_version": "1.2.9", + "affected_version_range": 
"vers:nginx/>=1.1.4|<=1.2.8|>=1.3.9|<=1.4.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000221.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000114.html", "severities": [ { "value": "medium", @@ -466,19 +424,31 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16845", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-2070", "severities": [], - "reference_id": "CVE-2018-16845", + "reference_id": "CVE-2013-2070", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2018.mp4.txt", + "url": "https://nginx.org/download/patch.2013.chunked.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2018.mp4.txt.asc", + "url": "https://nginx.org/download/patch.2013.chunked.txt.asc", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.proxy.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.proxy.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -488,8 +458,8 @@ "weaknesses": [] }, { - "unique_content_id": "3430956de63de2b1188c3d1e50c3b0cd", - "summary": "SPDY memory corruption", + "unique_content_id": "516f2188bdac91f9372ec3e200c4e754179f61fb8bf3a4613d97ebb569e46831", + "summary": "Memory corruption in the ngx_http_mp4_module", "affected_packages": [ { "package": { @@ -500,31 +470,49 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.5.11", - "affected_version_range": "vers:nginx/1.5.10" - } - ], + "fixed_version": "1.23.2", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" + }, + { + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.22.1", + "affected_version_range": 
"vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" + } + ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000132.html", - "severities": [], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2022/RBRRON6PYBJJM2XIAPQBFBVLR4Q6IHRA.html", + "severities": [ + { + "value": "medium", + "system": "generic_textual", + "scoring_elements": "" + } + ], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0088", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-41741", "severities": [], - "reference_id": "CVE-2014-0088", + "reference_id": "CVE-2022-41741", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2014.spdy.txt", + "url": "https://nginx.org/download/patch.2022.mp4.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2014.spdy.txt.asc", + "url": "https://nginx.org/download/patch.2022.mp4.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -534,8 +522,8 @@ "weaknesses": [] }, { - "unique_content_id": "43c2f41bb851164d3495f3c204a57f20", - "summary": "Memory disclosure in HTTP/3", + "unique_content_id": "60c648561ee11d1ece306182ff608e5d66aeb748c91c4c91d79aa4f7967f2149", + "summary": "Integer overflow in the range filter", "affected_packages": [ { "package": { @@ -546,8 +534,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.27.0", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.13.3", + "affected_version_range": "vers:nginx/>=0.5.6|<=1.13.2" }, { "package": { @@ -558,13 +546,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.26.1", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.12.1", + "affected_version_range": "vers:nginx/>=0.5.6|<=1.13.2" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", + "url": 
"https://mailman.nginx.org/pipermail/nginx-announce/2017/000200.html", "severities": [ { "value": "medium", @@ -576,59 +564,19 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-34161", - "severities": [], - "reference_id": "CVE-2024-34161", - "reference_type": "" - } - ], - "date_published": null, - "weaknesses": [] - }, - { - "unique_content_id": "686399b9012be40d39b5366ec1695768", - "summary": "The renegotiation vulnerability in SSL protocol", - "affected_packages": [ - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.8.23", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.22" - }, - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.7.64", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.22" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3555", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-7529", "severities": [], - "reference_id": "CVE-2009-3555", + "reference_id": "CVE-2017-7529", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.cve-2009-3555.txt", + "url": "https://nginx.org/download/patch.2017.ranges.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.cve-2009-3555.txt.asc", + "url": "https://nginx.org/download/patch.2017.ranges.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -638,8 +586,8 @@ "weaknesses": [] }, { - "unique_content_id": "71c918b8f82b4de8cfa23fc96fa0d7a7", - "summary": "Invalid pointer dereference in resolver", + "unique_content_id": "68957cdbe4f38386944b07c2f3138ad59f02df490dab487d8709f8642a395496", + "summary": "SSL session reuse vulnerability", "affected_packages": [ { "package": { @@ -650,8 +598,8 @@ "namespace": "", "qualifiers": "" }, - 
"fixed_version": "1.9.10", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" + "fixed_version": "1.7.5", + "affected_version_range": "vers:nginx/>=0.5.6|<=1.7.4" }, { "package": { @@ -662,13 +610,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.8.1", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" + "fixed_version": "1.6.2", + "affected_version_range": "vers:nginx/>=0.5.6|<=1.7.4" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000169.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000147.html", "severities": [ { "value": "medium", @@ -680,9 +628,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0742", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3616", "severities": [], - "reference_id": "CVE-2016-0742", + "reference_id": "CVE-2014-3616", "reference_type": "" } ], @@ -690,8 +638,8 @@ "weaknesses": [] }, { - "unique_content_id": "74ec3c647d544d6e6935492b7dceb572", - "summary": "Excessive CPU usage in HTTP/2 with priority changes", + "unique_content_id": "6dfd4b51bcdf1ee31bfdd97ee6370422b70533c1db972de69cdc2e281a4bb90a", + "summary": "Stack-based buffer overflow with specially crafted request", "affected_packages": [ { "package": { @@ -702,8 +650,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.17.3", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.5.0", + "affected_version_range": "vers:nginx/>=1.3.9|<=1.4.0" }, { "package": { @@ -714,27 +662,33 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.16.1", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.4.1", + "affected_version_range": "vers:nginx/>=1.3.9|<=1.4.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", - "severities": [ - { - "value": "low", - "system": "generic_textual", - "scoring_elements": "" - } - ], + "url": 
"https://mailman.nginx.org/pipermail/nginx-announce/2013/000112.html", + "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9513", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-2028", "severities": [], - "reference_id": "CVE-2019-9513", + "reference_id": "CVE-2013-2028", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.chunked.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.chunked.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -742,8 +696,8 @@ "weaknesses": [] }, { - "unique_content_id": "79d9b38e6e89e3f3fc5ca4b2e64d0faa", - "summary": "Stack overflow and use-after-free in HTTP/3", + "unique_content_id": "702a79bf8a92e5ce967d5d540f03d225e05906df0cb641c5538e0e8b8045aa89", + "summary": "NULL pointer dereference in HTTP/3", "affected_packages": [ { "package": { @@ -754,9 +708,31 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.27.0", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.25.4", + "affected_version_range": "vers:nginx/1.25.3" + } + ], + "references": [ + { + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/NW6MNW34VZ6HDIHH5YFBIJYZJN7FGNAV.html", + "severities": [], + "reference_id": "", + "reference_type": "" }, + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-24989", + "severities": [], + "reference_id": "CVE-2024-24989", + "reference_type": "" + } + ], + "date_published": null, + "weaknesses": [] + }, + { + "unique_content_id": "71ee7b435e15272f8531b568d58f82e33cfb3881f3ee80b5cae1788183f91827", + "summary": "Use-after-free in HTTP/3", + "affected_packages": [ { "package": { "name": "nginx", @@ -766,27 +742,21 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.26.1", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" + "fixed_version": "1.25.4", 
+ "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.3" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/NW6MNW34VZ6HDIHH5YFBIJYZJN7FGNAV.html", + "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-31079", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-24990", "severities": [], - "reference_id": "CVE-2024-31079", + "reference_id": "CVE-2024-24990", "reference_type": "" } ], @@ -794,8 +764,8 @@ "weaknesses": [] }, { - "unique_content_id": "83d5fba07f12acd2e4947e68d233fbe5", - "summary": "STARTTLS command injection", + "unique_content_id": "743193c823a19a8eea1eeb8bb5ea6c3314ca6350b8d6ba0bcf2ac29d2e99ab11", + "summary": "Memory disclosure in the ngx_http_mp4_module", "affected_packages": [ { "package": { @@ -806,8 +776,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.7.4", - "affected_version_range": "vers:nginx/>=1.5.6|<=1.7.3" + "fixed_version": "1.23.2", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" }, { "package": { @@ -818,13 +788,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.6.1", - "affected_version_range": "vers:nginx/>=1.5.6|<=1.7.3" + "fixed_version": "1.22.1", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000144.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2022/RBRRON6PYBJJM2XIAPQBFBVLR4Q6IHRA.html", "severities": [ { "value": "medium", @@ -836,19 +806,19 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3556", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-41742", "severities": [], - "reference_id": 
"CVE-2014-3556", + "reference_id": "CVE-2022-41742", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2014.starttls.txt", + "url": "https://nginx.org/download/patch.2022.mp4.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2014.starttls.txt.asc", + "url": "https://nginx.org/download/patch.2022.mp4.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -858,8 +828,8 @@ "weaknesses": [] }, { - "unique_content_id": "8ca47577347bd9f2027e09e32bc74866", - "summary": "Excessive CPU usage in HTTP/2 with small window updates", + "unique_content_id": "74d2403b1a2d875ba8411a315d217fd704642a39c3e9392bd2b81cd4e4cca8a8", + "summary": "Use-after-free during CNAME response processing in resolver", "affected_packages": [ { "package": { @@ -870,8 +840,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.17.3", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.9.10", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" }, { "package": { @@ -882,13 +852,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.16.1", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + "fixed_version": "1.8.1", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000169.html", "severities": [ { "value": "medium", @@ -900,9 +870,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9511", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0746", "severities": [], - "reference_id": "CVE-2019-9511", + "reference_id": "CVE-2016-0746", "reference_type": "" } ], @@ -910,8 +880,8 @@ "weaknesses": [] }, { - "unique_content_id": "901e1dc04473ff40c6e503baec5e9bf6", - "summary": "Buffer overflow in the ngx_http_mp4_module", + "unique_content_id": 
"79d90dc8b83d6267a92f31d11be14dc27e619f6edaa996935bf4d0d33b70e575", + "summary": "Buffer overflow in resolver", "affected_packages": [ { "package": { @@ -922,8 +892,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.19", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.14|>=1.1.3|<=1.1.18" + "fixed_version": "1.1.8", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.1.7" }, { "package": { @@ -934,83 +904,25 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.15", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.14|>=1.1.3|<=1.1.18" + "fixed_version": "1.0.10", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.1.7" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2012/000080.html", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-2089", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4315", "severities": [], - "reference_id": "CVE-2012-2089", + "reference_id": "CVE-2011-4315", "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2012.mp4.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2012.mp4.txt.asc", - "severities": [], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": null, - "weaknesses": [] - }, - { - "unique_content_id": "91c6638b38a1e6e2ff4997eeefef8cf8", - "summary": "Directory traversal vulnerability", - "affected_packages": [ - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.8.17", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.16" - }, - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.7.63", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.16" - } - ], - 
"references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3898", - "severities": [], - "reference_id": "CVE-2009-3898", - "reference_type": "" - } - ], - "date_published": null, - "weaknesses": [] - }, - { - "unique_content_id": "925abc90d30273fe8cb404b7f3c8dfd3", - "summary": "Insufficient limits of CNAME resolution in resolver", - "affected_packages": [ + } + ], + "date_published": null, + "weaknesses": [] + }, + { + "unique_content_id": "7dd1dec4f019ce4e044852324feb9444dbc965f26c98025bc28f50294251c5c0", + "summary": "Excessive CPU usage in HTTP/2 with small window updates", + "affected_packages": [ { "package": { "name": "nginx", @@ -1020,8 +932,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.9.10", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" + "fixed_version": "1.17.3", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" }, { "package": { @@ -1032,13 +944,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.8.1", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" + "fixed_version": "1.16.1", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000169.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", "severities": [ { "value": "medium", @@ -1050,9 +962,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0747", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9511", "severities": [], - "reference_id": "CVE-2016-0747", + "reference_id": "CVE-2019-9511", "reference_type": "" } ], @@ -1060,8 +972,8 @@ "weaknesses": [] }, { - "unique_content_id": "96c2ffdeacca4901942abd83d54f33f5", - "summary": "Vulnerabilities with Windows directory aliases", + "unique_content_id": "8f54462a45ac49635f660b6fb755d5e05cdbc34ebaa565e38ca20c522579ce7f", + "summary": "Vulnerabilities with Windows 8.3 filename pseudonyms", "affected_packages": [ { "package": { 
@@ -1072,8 +984,8 @@ "namespace": "", "qualifiers": "os=windows" }, - "fixed_version": "1.3.1", - "affected_version_range": "vers:nginx/>=0.7.52|<=1.3.0" + "fixed_version": "0.8.33", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.32" }, { "package": { @@ -1084,36 +996,17 @@ "namespace": "", "qualifiers": "os=windows" }, - "fixed_version": "1.2.1", - "affected_version_range": "vers:nginx/>=0.7.52|<=1.3.0" - } - ], - "references": [ - { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2012/000086.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4963", - "severities": [], - "reference_id": "CVE-2011-4963", - "reference_type": "" + "fixed_version": "0.7.65", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.32" } ], + "references": [], "date_published": null, "weaknesses": [] }, { - "unique_content_id": "b3192a372fdac00b2cdf462b562cf73b", - "summary": "Integer overflow in the range filter", + "unique_content_id": "92ce767b8cea36271d33c119cb6f706f64f5aba7335cca6791eca90a87f48de1", + "summary": "Vulnerabilities with Windows file default stream", "affected_packages": [ { "package": { @@ -1122,10 +1015,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "1.13.3", - "affected_version_range": "vers:nginx/>=0.5.6|<=1.13.2" + "fixed_version": "0.8.40", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.39" }, { "package": { @@ -1134,41 +1027,17 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "1.12.1", - "affected_version_range": "vers:nginx/>=0.5.6|<=1.13.2" + "fixed_version": "0.7.66", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.39" } ], "references": [ { - "url": 
"https://mailman.nginx.org/pipermail/nginx-announce/2017/000200.html", - "severities": [ - { - "value": "medium", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-7529", - "severities": [], - "reference_id": "CVE-2017-7529", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2017.ranges.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2017.ranges.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-2263", "severities": [], - "reference_id": "", + "reference_id": "CVE-2010-2263", "reference_type": "" } ], @@ -1176,8 +1045,8 @@ "weaknesses": [] }, { - "unique_content_id": "b3d7627b206f561242cdd2eae0e3bbeb", - "summary": "Buffer overwrite in HTTP/3", + "unique_content_id": "93ffd507f57f7b01de0bc7cff479daba1c120e28d45b60a14f8fa98bdf597f4a", + "summary": "NULL pointer dereference in HTTP/3", "affected_packages": [ { "package": { @@ -1218,9 +1087,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-32760", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-35200", "severities": [], - "reference_id": "CVE-2024-32760", + "reference_id": "CVE-2024-35200", "reference_type": "" } ], @@ -1228,8 +1097,8 @@ "weaknesses": [] }, { - "unique_content_id": "b72c609cd1be7c77f4432e1bc8c365f3", - "summary": "NULL pointer dereference in HTTP/3", + "unique_content_id": "95dab77a3ea69d6d0bac6b48719f4e1d5435af7f1f1a0c1d62aa343bed5e3f32", + "summary": "Buffer overwrite in HTTP/3", "affected_packages": [ { "package": { @@ -1270,9 +1139,9 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-35200", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-32760", "severities": [], - "reference_id": "CVE-2024-35200", + "reference_id": "CVE-2024-32760", "reference_type": "" } ], @@ -1280,8 +1149,8 @@ "weaknesses": [] 
}, { - "unique_content_id": "c616b60f7fd802e88ca29fce6222654e", - "summary": "Buffer underflow vulnerability", + "unique_content_id": "9a3699853c72ab1e08f226c4f09f669b6e8b6f0431fa4e78549cd87d8466e0f7", + "summary": "Vulnerabilities with invalid UTF-8 sequence on Windows", "affected_packages": [ { "package": { @@ -1290,34 +1159,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.8.15", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" - }, - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.7.62", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" - }, - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "0.6.39", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" + "fixed_version": "0.8.41", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.40" }, { "package": { @@ -1326,29 +1171,17 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "" + "qualifiers": "os=windows" }, - "fixed_version": "0.5.38", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" + "fixed_version": "0.7.67", + "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.40" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-2629", - "severities": [], - "reference_id": "CVE-2009-2629", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.180065.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.180065.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-2266", "severities": [], - "reference_id": "", + "reference_id": "CVE-2010-2266", "reference_type": "" } ], @@ -1356,7 +1189,7 @@ "weaknesses": [] }, { - "unique_content_id": "ca72fb146fcd014ee284ef66f7fc1c08", + 
"unique_content_id": "9bb829ca8d94430d97ea8bb4d67cddb9f41140a7550e5dced08918f35f1dc5f1", "summary": "Memory disclosure with specially crafted backend responses", "affected_packages": [ { @@ -1414,8 +1247,8 @@ "weaknesses": [] }, { - "unique_content_id": "cb70875e6e02b2d41dd8876b4729bf84", - "summary": "Excessive memory usage in HTTP/2", + "unique_content_id": "9d373a60d30d98c6a84d134e0f1c1880b4e82b795a9175c51b172c9d988633c4", + "summary": "Buffer overflow in the ngx_http_mp4_module", "affected_packages": [ { "package": { @@ -1426,8 +1259,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.15.6", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" + "fixed_version": "1.1.19", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.14|>=1.1.3|<=1.1.18" }, { "package": { @@ -1438,27 +1271,33 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.14.1", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" + "fixed_version": "1.0.15", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.14|>=1.1.3|<=1.1.18" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000220.html", - "severities": [ - { - "value": "low", - "system": "generic_textual", - "scoring_elements": "" - } - ], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2012/000080.html", + "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16843", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-2089", "severities": [], - "reference_id": "CVE-2018-16843", + "reference_id": "CVE-2012-2089", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2012.mp4.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2012.mp4.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -1466,8 +1305,8 @@ "weaknesses": [] }, { - "unique_content_id": 
"ce87032bced3f187b1c0fbacc52b8c16", - "summary": "SSL session reuse vulnerability", + "unique_content_id": "b011769b7166e6e3a5b0dabd560be9fec2b4963a0c14c8934b394504041dd801", + "summary": "Request line parsing vulnerability", "affected_packages": [ { "package": { @@ -1478,8 +1317,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.7.5", - "affected_version_range": "vers:nginx/>=0.5.6|<=1.7.4" + "fixed_version": "1.5.7", + "affected_version_range": "vers:nginx/>=0.8.41|<=1.5.6" }, { "package": { @@ -1490,13 +1329,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.6.2", - "affected_version_range": "vers:nginx/>=0.5.6|<=1.7.4" + "fixed_version": "1.4.4", + "affected_version_range": "vers:nginx/>=0.8.41|<=1.5.6" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000147.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000125.html", "severities": [ { "value": "medium", @@ -1508,9 +1347,21 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3616", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-4547", "severities": [], - "reference_id": "CVE-2014-3616", + "reference_id": "CVE-2013-4547", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.space.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2013.space.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -1518,8 +1369,8 @@ "weaknesses": [] }, { - "unique_content_id": "cf47abf58659080601c4cd87a119a769", - "summary": "Excessive CPU usage in HTTP/2", + "unique_content_id": "b141e948fdfecc52a52fd4111fff37b57216a7f8fd1421df478db15e620a4571", + "summary": "1-byte memory overwrite in resolver", "affected_packages": [ { "package": { @@ -1530,8 +1381,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.15.6", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" + 
"fixed_version": "1.21.0", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.20.0" }, { "package": { @@ -1542,16 +1393,16 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.14.1", - "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" + "fixed_version": "1.20.1", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.20.0" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000220.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2021/000300.html", "severities": [ { - "value": "low", + "value": "medium", "system": "generic_textual", "scoring_elements": "" } @@ -1560,9 +1411,21 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16844", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23017", "severities": [], - "reference_id": "CVE-2018-16844", + "reference_id": "CVE-2021-23017", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2021.resolver.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.2021.resolver.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -1570,8 +1433,8 @@ "weaknesses": [] }, { - "unique_content_id": "d403898b9315a9ec88d9a401af5352fb", - "summary": "Buffer overflow in resolver", + "unique_content_id": "b97accb1929bfc3181c61e41c2163f051cac435ea3671b05ebf708ac24c53f15", + "summary": "Memory disclosure in HTTP/3", "affected_packages": [ { "package": { @@ -1582,8 +1445,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.8", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.1.7" + "fixed_version": "1.27.0", + "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.5|1.26.0" }, { "package": { @@ -1594,15 +1457,27 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.10", - "affected_version_range": "vers:nginx/>=0.6.18|<=1.1.7" + "fixed_version": "1.26.1", + "affected_version_range": 
"vers:nginx/>=1.25.0|<=1.25.5|1.26.0" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4315", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/GMY32CSHFH6VFTN76HJNX7WNEX4RLHF6.html", + "severities": [ + { + "value": "medium", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-34161", "severities": [], - "reference_id": "CVE-2011-4315", + "reference_id": "CVE-2024-34161", "reference_type": "" } ], @@ -1610,8 +1485,8 @@ "weaknesses": [] }, { - "unique_content_id": "dab2e1aa4777dbcd579905643982aab1", - "summary": "Null pointer dereference vulnerability", + "unique_content_id": "cc6ff6eaba227bf65c93964fdf2731b75ff1597638283ae950e3941cd4932632", + "summary": "Invalid pointer dereference in resolver", "affected_packages": [ { "package": { @@ -1622,8 +1497,60 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.8.14", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" + "fixed_version": "1.9.10", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" + }, + { + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.8.1", + "affected_version_range": "vers:nginx/>=0.6.18|<=1.9.9" + } + ], + "references": [ + { + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2016/000169.html", + "severities": [ + { + "value": "medium", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0742", + "severities": [], + "reference_id": "CVE-2016-0742", + "reference_type": "" + } + ], + "date_published": null, + "weaknesses": [] + }, + { + "unique_content_id": "de7a819f87c93c708251b734406d2b9916fce494ab3987be40ca37426b0c2044", + "summary": "Buffer underflow vulnerability", + "affected_packages": 
[ + { + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.8.15", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" }, { "package": { @@ -1635,7 +1562,7 @@ "qualifiers": "" }, "fixed_version": "0.7.62", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" }, { "package": { @@ -1647,7 +1574,7 @@ "qualifiers": "" }, "fixed_version": "0.6.39", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" }, { "package": { @@ -1659,24 +1586,24 @@ "qualifiers": "" }, "fixed_version": "0.5.38", - "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.14" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3896", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-2629", "severities": [], - "reference_id": "CVE-2009-3896", + "reference_id": "CVE-2009-2629", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.null.pointer.txt", + "url": "https://nginx.org/download/patch.180065.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.null.pointer.txt.asc", + "url": "https://nginx.org/download/patch.180065.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -1686,8 +1613,8 @@ "weaknesses": [] }, { - "unique_content_id": "dad2ebc242641f6a276b00769ef57efa", - "summary": "Memory corruption in the ngx_http_mp4_module", + "unique_content_id": "e3af8c6275036d10bb0d3b20807288808bcb24ff1fad37f09757d381f90fc862", + "summary": "STARTTLS command injection", "affected_packages": [ { "package": { @@ -1698,8 +1625,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.23.2", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" + "fixed_version": "1.7.4", + 
"affected_version_range": "vers:nginx/>=1.5.6|<=1.7.3" }, { "package": { @@ -1710,13 +1637,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.22.1", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" + "fixed_version": "1.6.1", + "affected_version_range": "vers:nginx/>=1.5.6|<=1.7.3" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2022/RBRRON6PYBJJM2XIAPQBFBVLR4Q6IHRA.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000144.html", "severities": [ { "value": "medium", @@ -1728,19 +1655,19 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-41741", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3556", "severities": [], - "reference_id": "CVE-2022-41741", + "reference_id": "CVE-2014-3556", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2022.mp4.txt", + "url": "https://nginx.org/download/patch.2014.starttls.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2022.mp4.txt.asc", + "url": "https://nginx.org/download/patch.2014.starttls.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -1750,8 +1677,8 @@ "weaknesses": [] }, { - "unique_content_id": "db01da77157a7a773285dc98169416ec", - "summary": "SPDY heap buffer overflow", + "unique_content_id": "e4731a12d4f385fc4d0774714c3e79dc98b8ec9c1c648120e0aa196a0d165066", + "summary": "Excessive memory usage in HTTP/2", "affected_packages": [ { "package": { @@ -1762,8 +1689,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.5.12", - "affected_version_range": "vers:nginx/>=1.3.15|<=1.5.11" + "fixed_version": "1.15.6", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" }, { "package": { @@ -1774,33 +1701,27 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.4.7", - "affected_version_range": "vers:nginx/>=1.3.15|<=1.5.11" + "fixed_version": "1.14.1", + 
"affected_version_range": "vers:nginx/>=1.9.5|<=1.15.5" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000135.html", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0133", - "severities": [], - "reference_id": "CVE-2014-0133", - "reference_type": "" - }, - { - "url": "https://nginx.org/download/patch.2014.spdy2.txt", - "severities": [], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000220.html", + "severities": [ + { + "value": "low", + "system": "generic_textual", + "scoring_elements": "" + } + ], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2014.spdy2.txt.asc", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16843", "severities": [], - "reference_id": "", + "reference_id": "CVE-2018-16843", "reference_type": "" } ], @@ -1808,8 +1729,8 @@ "weaknesses": [] }, { - "unique_content_id": "e06ef4fb12b1b0817736222cc219c5be", - "summary": "Vulnerabilities with Windows 8.3 filename pseudonyms", + "unique_content_id": "e9adfcf58bd2f302fd81436744937e8ea8bae7e1d7133d54cc4097bb94e68656", + "summary": "Directory traversal vulnerability", "affected_packages": [ { "package": { @@ -1818,10 +1739,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.8.33", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.32" + "fixed_version": "0.8.17", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.16" }, { "package": { @@ -1830,18 +1751,25 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.7.65", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.32" + "fixed_version": "0.7.63", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.16" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3898", + "severities": [], + 
"reference_id": "CVE-2009-3898", + "reference_type": "" } ], - "references": [], "date_published": null, "weaknesses": [] }, { - "unique_content_id": "e17dde538a78c978602298541bcd29f0", + "unique_content_id": "ef80f06b34224fbde70a6a359ccf297c0ec2bfae9148973d3689a1c2acb888ad", "summary": "Memory disclosure in the ngx_http_mp4_module", "affected_packages": [ { @@ -1853,8 +1781,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.23.2", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" + "fixed_version": "1.15.6", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.15.5" }, { "package": { @@ -1865,13 +1793,13 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.22.1", - "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.23.1" + "fixed_version": "1.14.1", + "affected_version_range": "vers:nginx/>=1.0.7|<=1.0.15|>=1.1.3|<=1.15.5" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2022/RBRRON6PYBJJM2XIAPQBFBVLR4Q6IHRA.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2018/000221.html", "severities": [ { "value": "medium", @@ -1883,19 +1811,19 @@ "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-41742", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-16845", "severities": [], - "reference_id": "CVE-2022-41742", + "reference_id": "CVE-2018-16845", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2022.mp4.txt", + "url": "https://nginx.org/download/patch.2018.mp4.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2022.mp4.txt.asc", + "url": "https://nginx.org/download/patch.2018.mp4.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -1905,8 +1833,8 @@ "weaknesses": [] }, { - "unique_content_id": "e4c6a0358264fb7523f6ee40f844854f", - "summary": "NULL pointer dereference in HTTP/3", + "unique_content_id": 
"f52c1d6763864aa721f3c5d6fa201712a04cea0851085e8129014e56ba7b4bbe", + "summary": "Excessive CPU usage in HTTP/2 with priority changes", "affected_packages": [ { "package": { @@ -1917,21 +1845,39 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.25.4", - "affected_version_range": "vers:nginx/1.25.3" + "fixed_version": "1.17.3", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" + }, + { + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.16.1", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/NW6MNW34VZ6HDIHH5YFBIJYZJN7FGNAV.html", - "severities": [], + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", + "severities": [ + { + "value": "low", + "system": "generic_textual", + "scoring_elements": "" + } + ], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-24989", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9513", "severities": [], - "reference_id": "CVE-2024-24989", + "reference_id": "CVE-2019-9513", "reference_type": "" } ], @@ -1939,8 +1885,8 @@ "weaknesses": [] }, { - "unique_content_id": "e74396e2dc204fb095c802fe54d4d176", - "summary": "Stack-based buffer overflow with specially crafted request", + "unique_content_id": "f9a0149f8d0c6afe588cc7c0a170e45c828219c342b9d7ca12d0e830c68b752a", + "summary": "SPDY memory corruption", "affected_packages": [ { "package": { @@ -1951,43 +1897,31 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.5.0", - "affected_version_range": "vers:nginx/>=1.3.9|<=1.4.0" - }, - { - "package": { - "name": "nginx", - "type": "nginx", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.4.1", - "affected_version_range": "vers:nginx/>=1.3.9|<=1.4.0" + "fixed_version": "1.5.11", + 
"affected_version_range": "vers:nginx/1.5.10" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2013/000112.html", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2014/000132.html", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-2028", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0088", "severities": [], - "reference_id": "CVE-2013-2028", + "reference_id": "CVE-2014-0088", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2013.chunked.txt", + "url": "https://nginx.org/download/patch.2014.spdy.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nginx.org/download/patch.2013.chunked.txt.asc", + "url": "https://nginx.org/download/patch.2014.spdy.txt.asc", "severities": [], "reference_id": "", "reference_type": "" @@ -1997,8 +1931,8 @@ "weaknesses": [] }, { - "unique_content_id": "eb41c9a738129f7f76c5ff813d190621", - "summary": "Vulnerabilities with invalid UTF-8 sequence on Windows", + "unique_content_id": "fc72f81267258996f729b98893890074ad6155adcc3352d30a04765977836995", + "summary": "The renegotiation vulnerability in SSL protocol", "affected_packages": [ { "package": { @@ -2007,10 +1941,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.8.41", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.40" + "fixed_version": "0.8.23", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.22" }, { "package": { @@ -2019,17 +1953,29 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.7.67", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.40" + "fixed_version": "0.7.64", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.22" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-2266", + "url": 
"https://nvd.nist.gov/vuln/detail/CVE-2009-3555", "severities": [], - "reference_id": "CVE-2010-2266", + "reference_id": "CVE-2009-3555", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.cve-2009-3555.txt", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.cve-2009-3555.txt.asc", + "severities": [], + "reference_id": "", "reference_type": "" } ], @@ -2037,8 +1983,8 @@ "weaknesses": [] }, { - "unique_content_id": "ef00adb6af6c2a00e81c8ec8de71eed6", - "summary": "Vulnerabilities with Windows file default stream", + "unique_content_id": "fcb04608ea5442dbf70575273074915efc16a95be9d8c84d5f3146f6917b3fb1", + "summary": "Excessive memory usage in HTTP/2 with zero length headers", "affected_packages": [ { "package": { @@ -2047,10 +1993,10 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.8.40", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.39" + "fixed_version": "1.17.3", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" }, { "package": { @@ -2059,17 +2005,29 @@ "subpath": "", "version": "", "namespace": "", - "qualifiers": "os=windows" + "qualifiers": "" }, - "fixed_version": "0.7.66", - "affected_version_range": "vers:nginx/>=0.7.52|<=0.8.39" + "fixed_version": "1.16.1", + "affected_version_range": "vers:nginx/>=1.9.5|<=1.17.2" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-2263", + "url": "https://mailman.nginx.org/pipermail/nginx-announce/2019/000249.html", + "severities": [ + { + "value": "low", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-9516", "severities": [], - "reference_id": "CVE-2010-2263", + "reference_id": "CVE-2019-9516", "reference_type": "" } ], @@ -2077,8 +2035,8 @@ "weaknesses": [] }, { - "unique_content_id": 
"f87492771be35866bf4dce017ea54dc8", - "summary": "Use-after-free in HTTP/3", + "unique_content_id": "fcb0ba0ce66c1f1cf3b4213fd6e9108ab9965d633582d3e9c070a792e02d9876", + "summary": "Null pointer dereference vulnerability", "affected_packages": [ { "package": { @@ -2089,21 +2047,63 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.25.4", - "affected_version_range": "vers:nginx/>=1.25.0|<=1.25.3" + "fixed_version": "0.8.14", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" + }, + { + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.7.62", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" + }, + { + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.6.39", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" + }, + { + "package": { + "name": "nginx", + "type": "nginx", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.5.38", + "affected_version_range": "vers:nginx/>=0.1.0|<=0.8.13" } ], "references": [ { - "url": "https://mailman.nginx.org/pipermail/nginx-announce/2024/NW6MNW34VZ6HDIHH5YFBIJYZJN7FGNAV.html", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3896", + "severities": [], + "reference_id": "CVE-2009-3896", + "reference_type": "" + }, + { + "url": "https://nginx.org/download/patch.null.pointer.txt", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-24990", + "url": "https://nginx.org/download/patch.null.pointer.txt.asc", "severities": [], - "reference_id": "CVE-2024-24990", + "reference_id": "", "reference_type": "" } ], diff --git a/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json b/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json index 
fa6223064..60722cd00 100644 --- a/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json +++ b/vulnerabilities/tests/test_data/openssl/security_advisories-importer-expected.json @@ -1,7 +1,7 @@ [ { - "unique_content_id": "167751346aa8fefc0a6e3b73ccb1f1a0", - "summary": "A buffer overflow when Kerberos is enabled allowed attackers to execute arbitrary code by sending a long master key. Note that this flaw did not affect any released version of 0.9.6 or 0.9.7", + "unique_content_id": "01616cd468b12076531c0a0453c8766381afac45b3bae651b2535336c25195c6", + "summary": "A flaw in the ASN1 library allowed remote attackers to cause a denial of service by sending invalid encodings.", "affected_packages": [ { "package": { @@ -12,15 +12,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.7", - "affected_version_range": "vers:openssl/0.9.7-beta3" + "fixed_version": "0.9.6e", + "affected_version_range": "vers:openssl/0.9.6a|0.9.6b|0.9.6c|0.9.6d" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0657", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0659", "severities": [], - "reference_id": "CVE-2002-0657", + "reference_id": "CVE-2002-0659", "reference_type": "" }, { @@ -34,8 +34,8 @@ "weaknesses": [] }, { - "unique_content_id": "35448b5f7b3fba9f72b91c02f114fb54", - "summary": "Inproper handling of ASCII representations of integers on 64 bit platforms allowed remote attackers to cause a denial of service or possibly execute arbitrary code.", + "unique_content_id": "9bdebb1f707c4c32b8834d1c6d0b55faa70072728c35bc0215df164af8448367", + "summary": "A buffer overflow when Kerberos is enabled allowed attackers to execute arbitrary code by sending a long master key. 
Note that this flaw did not affect any released version of 0.9.6 or 0.9.7", "affected_packages": [ { "package": { @@ -46,15 +46,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.6e", - "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d" + "fixed_version": "0.9.7", + "affected_version_range": "vers:openssl/0.9.7-beta3" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0655", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0657", "severities": [], - "reference_id": "CVE-2002-0655", + "reference_id": "CVE-2002-0657", "reference_type": "" }, { @@ -68,8 +68,8 @@ "weaknesses": [] }, { - "unique_content_id": "829a6d1f23353afa49ace62ba465a58f", - "summary": "A buffer overflow allowed remote attackers to execute arbitrary code by sending a large client master key in SSL2 or a large session ID in SSL3.", + "unique_content_id": "db3632c3ff2c87ef3524c93e91dc8cbeca0778583bcb08c9a8807cbb282d31cb", + "summary": "Inproper handling of ASCII representations of integers on 64 bit platforms allowed remote attackers to cause a denial of service or possibly execute arbitrary code.", "affected_packages": [ { "package": { @@ -86,9 +86,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0656", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0655", "severities": [], - "reference_id": "CVE-2002-0656", + "reference_id": "CVE-2002-0655", "reference_type": "" }, { @@ -102,8 +102,8 @@ "weaknesses": [] }, { - "unique_content_id": "cd2aa8fefe14c523b0f404ea639582db", - "summary": "A flaw in the ASN1 library allowed remote attackers to cause a denial of service by sending invalid encodings.", + "unique_content_id": "f4f8760e71f028224b6bdbe5b477b90217df8ca6905036317584b92781c2a119", + "summary": "A buffer overflow allowed remote attackers to execute arbitrary code by sending a large client master key in SSL2 or a large session ID in SSL3.", "affected_packages": [ { "package": { @@ -115,14 +115,14 @@ 
"qualifiers": "" }, "fixed_version": "0.9.6e", - "affected_version_range": "vers:openssl/0.9.6a|0.9.6b|0.9.6c|0.9.6d" + "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0659", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2002-0656", "severities": [], - "reference_id": "CVE-2002-0659", + "reference_id": "CVE-2002-0656", "reference_type": "" }, { @@ -136,7 +136,7 @@ "weaknesses": [] }, { - "unique_content_id": "8544420c83cf74faff35e8829adaa340", + "unique_content_id": "49964979bdbf578d45f122df679ba527fd8fbf64cc2d077728fb1c7f506f4c7f", "summary": "The use of assertions when detecting buffer overflow attacks allowed remote attackers to cause a denial of service (crash) by sending certain messages to cause OpenSSL to abort from a failed assertion, as demonstrated using SSLv2 CLIENT_MASTER_KEY messages, which were not properly handled in s2_srvr.c.", "affected_packages": [ { @@ -170,7 +170,7 @@ "weaknesses": [] }, { - "unique_content_id": "61d2edb3343321c505bed6e2c93025b1", + "unique_content_id": "9a471da876825cebb089f856300f156b2987e0ffe50686b1646bb2041e7e4c8b", "summary": "sl3_get_record in s3_pkt.c did not perform a MAC computation if an incorrect block cipher padding was used, causing an information leak (timing discrepancy) that may make it easier to launch cryptographic attacks that rely on distinguishing between padding and MAC verification errors, possibly leading to extraction of the original plaintext, aka the \"Vaudenay timing attack.\"", "affected_packages": [ { @@ -216,7 +216,7 @@ "weaknesses": [] }, { - "unique_content_id": "4fbc2d1aad1223b8ab887ce8d4d07175", + "unique_content_id": "29882534d53b1efc839bf130322ad85c220fa6326b24268aeed6af66f2855d02", "summary": "RSA blinding was not enabled by default, which could allow local and remote attackers to obtain a server's private key by determining factors using timing differences on (1) the number of extra 
reductions during Montgomery reduction, and (2) the use of different integer multiplication algorithms (\"Karatsuba\" and normal).", "affected_packages": [ { @@ -262,7 +262,7 @@ "weaknesses": [] }, { - "unique_content_id": "a0eeb293e46b8d3bbd5029ccaa8585bd", + "unique_content_id": "ea79326dc573c9da310a5d90e901d9c1c6844afbc7ba492ee6edcf3fc6ed9208", "summary": "The SSL and TLS components allowed remote attackers to perform an unauthorized RSA private key operation via a modified Bleichenbacher attack that uses a large number of SSL or TLS connections using PKCS #1 v1.5 padding that caused OpenSSL to leak information regarding the relationship between ciphertext and the associated plaintext, aka the \"Klima-Pokorny-Rosa attack\"", "affected_packages": [ { @@ -308,8 +308,8 @@ "weaknesses": [] }, { - "unique_content_id": "23009992dbac485c71608f4cf9811ef2", - "summary": "Certain ASN.1 encodings that were rejected as invalid by the parser could trigger a bug in the deallocation of the corresponding data structure, corrupting the stack, leading to a crash.", + "unique_content_id": "038ee7715473ae9e8184e755bbc864397d9e9c4bdc7b878782197d5f445085ac", + "summary": "Incorrect tracking of the number of characters in certain ASN.1 inputs could allow remote attackers to cause a denial of service (crash) by sending an SSL client certificate that causes OpenSSL to read past the end of a buffer when the long form is used.", "affected_packages": [ { "package": { @@ -322,13 +322,25 @@ }, "fixed_version": "0.9.7c", "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.6k", + "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d|0.9.6e|0.9.6f|0.9.6g|0.9.6h|0.9.6i|0.9.6j" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0545", + "url": 
"https://nvd.nist.gov/vuln/detail/CVE-2003-0544", "severities": [], - "reference_id": "CVE-2003-0545", + "reference_id": "CVE-2003-0544", "reference_type": "" }, { @@ -342,7 +354,7 @@ "weaknesses": [] }, { - "unique_content_id": "525144b2cfc83c2afb4746cbb043f665", + "unique_content_id": "2ba1e73cd00bc41e969ea310ec78534f4c6d5124ca0b871dc4ce322a4b34e232", "summary": "An integer overflow could allow remote attackers to cause a denial of service (crash) via an SSL client certificate with certain ASN.1 tag values.", "affected_packages": [ { @@ -388,8 +400,8 @@ "weaknesses": [] }, { - "unique_content_id": "b20ae6e077855796c5fa2ea663a88269", - "summary": "Incorrect tracking of the number of characters in certain ASN.1 inputs could allow remote attackers to cause a denial of service (crash) by sending an SSL client certificate that causes OpenSSL to read past the end of a buffer when the long form is used.", + "unique_content_id": "e510e167dfcfce7357fe0616e7ae6ff525c3c2325ea6e0011c06d1300f1d7c10", + "summary": "Certain ASN.1 encodings that were rejected as invalid by the parser could trigger a bug in the deallocation of the corresponding data structure, corrupting the stack, leading to a crash.", "affected_packages": [ { "package": { @@ -402,25 +414,13 @@ }, "fixed_version": "0.9.7c", "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.6k", - "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c|0.9.6d|0.9.6e|0.9.6f|0.9.6g|0.9.6h|0.9.6i|0.9.6j" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0544", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0545", "severities": [], - "reference_id": "CVE-2003-0544", + "reference_id": "CVE-2003-0545", "reference_type": "" }, { @@ -434,7 +434,7 @@ "weaknesses": [] }, { - "unique_content_id": 
"47507506fbd9633ba7a6429dc0db28b5", + "unique_content_id": "fb504a9108cb16e440dc0db440f4bae47f2683838b518db42a371fc0453d6a88", "summary": "A flaw in OpenSSL 0.9.6k (only) would cause certain ASN.1 sequences to trigger a large recursion. On platforms such as Windows this large recursion cannot be handled correctly and so the bug causes OpenSSL to crash. A remote attacker could exploit this flaw if they can send arbitrary ASN.1 sequences which would cause OpenSSL to crash. This could be performed for example by sending a client certificate to a SSL/TLS enabled server which is configured to accept them.", "affected_packages": [ { @@ -468,8 +468,8 @@ "weaknesses": [] }, { - "unique_content_id": "2c802d89f18645aa477b635d3a5242ad", - "summary": "The Codenomicon TLS Test Tool uncovered a null-pointer assignment in the do_change_cipher_spec() function. A remote attacker could perform a carefully crafted SSL/TLS handshake against a server that used the OpenSSL library in such a way as to cause a crash.", + "unique_content_id": "7a9fed2602761c2ae8073bce2e5e1dfa60cb84b83c4fe6e05906bbbaf5e46c7a", + "summary": "The Codenomicon TLS Test Tool found that some unknown message types were handled incorrectly, allowing a remote attacker to cause a denial of service (infinite loop).", "affected_packages": [ { "package": { @@ -480,31 +480,19 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.6m", - "affected_version_range": "vers:openssl/0.9.6c|0.9.6d|0.9.6e|0.9.6f|0.9.6g|0.9.6h|0.9.6i|0.9.6j|0.9.6k|0.9.6l" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.7d", - "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b|0.9.7c" + "fixed_version": "0.9.6d", + "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2004-0079", + "url": 
"https://nvd.nist.gov/vuln/detail/CVE-2004-0081", "severities": [], - "reference_id": "CVE-2004-0079", + "reference_id": "CVE-2004-0081", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20040317.txt", + "url": "https://www.openssl.org/news/secadv/20030317.txt", "severities": [], "reference_id": "", "reference_type": "" @@ -514,8 +502,8 @@ "weaknesses": [] }, { - "unique_content_id": "6f23a0db775050dc33df47c7cc883b11", - "summary": "The Codenomicon TLS Test Tool found that some unknown message types were handled incorrectly, allowing a remote attacker to cause a denial of service (infinite loop).", + "unique_content_id": "9d9976f31462bb2e67fbf400706c1d2b0299c697e42bf4d3b8dd8e57a37d8e6a", + "summary": "A flaw in SSL/TLS handshaking code when using Kerberos ciphersuites. A remote attacker could perform a carefully crafted SSL/TLS handshake against a server configured to use Kerberos ciphersuites in such a way as to cause OpenSSL to crash. Most applications have no ability to use Kerberos ciphersuites and will therefore be unaffected.", "affected_packages": [ { "package": { @@ -526,19 +514,19 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.6d", - "affected_version_range": "vers:openssl/0.9.6|0.9.6a|0.9.6b|0.9.6c" + "fixed_version": "0.9.7d", + "affected_version_range": "vers:openssl/0.9.7a|0.9.7b|0.9.7c" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2004-0081", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2004-0112", "severities": [], - "reference_id": "CVE-2004-0081", + "reference_id": "CVE-2004-0112", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20030317.txt", + "url": "https://www.openssl.org/news/secadv/20040317.txt", "severities": [], "reference_id": "", "reference_type": "" @@ -548,9 +536,21 @@ "weaknesses": [] }, { - "unique_content_id": "cb0e8758b89ae43b1ed34bfb3c0b3b56", - "summary": "A flaw in SSL/TLS handshaking code when using Kerberos ciphersuites. 
A remote attacker could perform a carefully crafted SSL/TLS handshake against a server configured to use Kerberos ciphersuites in such a way as to cause OpenSSL to crash. Most applications have no ability to use Kerberos ciphersuites and will therefore be unaffected.", + "unique_content_id": "a467aec230d90bf340b7325fe9207425c4d35680a470268682407639819c56f6", + "summary": "The Codenomicon TLS Test Tool uncovered a null-pointer assignment in the do_change_cipher_spec() function. A remote attacker could perform a carefully crafted SSL/TLS handshake against a server that used the OpenSSL library in such a way as to cause a crash.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.6m", + "affected_version_range": "vers:openssl/0.9.6c|0.9.6d|0.9.6e|0.9.6f|0.9.6g|0.9.6h|0.9.6i|0.9.6j|0.9.6k|0.9.6l" + }, { "package": { "name": "openssl", @@ -561,14 +561,14 @@ "qualifiers": "" }, "fixed_version": "0.9.7d", - "affected_version_range": "vers:openssl/0.9.7a|0.9.7b|0.9.7c" + "affected_version_range": "vers:openssl/0.9.7|0.9.7a|0.9.7b|0.9.7c" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2004-0112", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2004-0079", "severities": [], - "reference_id": "CVE-2004-0112", + "reference_id": "CVE-2004-0079", "reference_type": "" }, { @@ -582,7 +582,7 @@ "weaknesses": [] }, { - "unique_content_id": "de61ebaf88fec68edc50b1bbc3c82f15", + "unique_content_id": "5b55cf4a1e9c3add130bf345864834163a6924f0165a25458ddf710b31d56b70", "summary": "The der_chop script created temporary files insecurely which could allow local users to overwrite files via a symlink attack on temporary files. 
Note that it is quite unlikely that a user would be using the redundant der_chop script, and this script was removed from the OpenSSL distribution.", "affected_packages": [ { @@ -628,7 +628,7 @@ "weaknesses": [] }, { - "unique_content_id": "9cc871a9e62ad5ca419397816ae02f3f", + "unique_content_id": "cba43db55e749a2cd6a8e2b4a8859b0cfb99c57ebb384b08ff64687b69982e0c", "summary": "A deprecated option, SSL_OP_MISE_SSLV2_RSA_PADDING, could allow an attacker acting as a \"man in the middle\" to force a connection to downgrade to SSL 2.0 even if both parties support better protocols.", "affected_packages": [ { @@ -686,7 +686,7 @@ "weaknesses": [] }, { - "unique_content_id": "509415f8d684ef69f274426ff454ee18", + "unique_content_id": "d40f47b16b42d15836f11963090ae9bd8ee81396815649437c05a3763f5c0028", "summary": "Daniel Bleichenbacher discovered an attack on PKCS #1 v1.5 signatures where under certain circumstances it may be possible for an attacker to forge a PKCS #1 v1.5 signature that would be incorrectly verified by OpenSSL.", "affected_packages": [ { @@ -744,7 +744,7 @@ "weaknesses": [] }, { - "unique_content_id": "1ed97c8f77a2948144952bbf2df0d15f", + "unique_content_id": "1012d0129bc2bf8d506f3a5abe83570b93979b82add79f0167a08320e397d181", "summary": "Certain types of public key can take disproportionate amounts of time to process. This could be used by an attacker in a denial of service attack.", "affected_packages": [ { @@ -802,8 +802,8 @@ "weaknesses": [] }, { - "unique_content_id": "275102d3f86e163b329b3bd7e4032658", - "summary": "A buffer overflow was discovered in the SSL_get_shared_ciphers() utility function. An attacker could send a list of ciphers to an application that uses this function and overrun a buffer.", + "unique_content_id": "6ce834bf29c1216739243c40e4e7e13563b6e7ee37195b59489542cdae28c644", + "summary": "A flaw in the SSLv2 client code was discovered. 
When a client application used OpenSSL to create an SSLv2 connection to a malicious server, that server could cause the client to crash.", "affected_packages": [ { "package": { @@ -844,9 +844,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-3738", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-4343", "severities": [], - "reference_id": "CVE-2006-3738", + "reference_id": "CVE-2006-4343", "reference_type": "" }, { @@ -860,8 +860,8 @@ "weaknesses": [] }, { - "unique_content_id": "65804b3824faa47750e76089a0851d29", - "summary": "A flaw in the SSLv2 client code was discovered. When a client application used OpenSSL to create an SSLv2 connection to a malicious server, that server could cause the client to crash.", + "unique_content_id": "8280b343c51657b22636bc717abb349ca3c44f0c053bc1e4a5f0b36440229d47", + "summary": "A buffer overflow was discovered in the SSL_get_shared_ciphers() utility function. An attacker could send a list of ciphers to an application that uses this function and overrun a buffer.", "affected_packages": [ { "package": { @@ -902,9 +902,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-4343", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-3738", "severities": [], - "reference_id": "CVE-2006-4343", + "reference_id": "CVE-2006-3738", "reference_type": "" }, { @@ -918,7 +918,7 @@ "weaknesses": [] }, { - "unique_content_id": "95ecb527c6494eb3dc0e22337c257b02", + "unique_content_id": "9257f845c847e35c7d1aa8587eac8fecc3e42ea36da4a73525adfc9c552d92d6", "summary": "During the parsing of certain invalid ASN.1 structures an error condition is mishandled. 
This can result in an infinite loop which consumes system memory", "affected_packages": [ { @@ -964,7 +964,7 @@ "weaknesses": [] }, { - "unique_content_id": "987af90a510832e0adfe428cf642f8b3", + "unique_content_id": "6e1fe5317b3377fba03774a136517301651a683c7bd40e56718a77b14718f8ba", "summary": "A flaw was found in the SSL_get_shared_ciphers() utility function. An attacker could send a list of ciphers to an application that used this function and overrun a buffer with a single byte. Few applications make use of this vulnerable function and generally it is used only when applications are compiled for debugging.", "affected_packages": [ { @@ -998,7 +998,7 @@ "weaknesses": [] }, { - "unique_content_id": "df251bb60bdec54891d4de225180f2ee", + "unique_content_id": "af7a8ad59af270f7ef97f3219807aacf3e5ef68c009a1a127593c7ed0371393d", "summary": "A flaw in DTLS support. An attacker could create a malicious client or server that could trigger a heap overflow. This is possibly exploitable to run arbitrary code, but it has not been verified.", "affected_packages": [ { @@ -1032,8 +1032,8 @@ "weaknesses": [] }, { - "unique_content_id": "2583bf8ccba8c985bab919b69ccc00e5", - "summary": "Testing using the Codenomicon TLS test suite discovered a flaw in the handling of server name extension data in OpenSSL 0.9.8f and OpenSSL 0.9.8g. If OpenSSL has been compiled using the non-default TLS server name extensions, a remote attacker could send a carefully crafted packet to a server application using OpenSSL and cause it to crash.", + "unique_content_id": "0a025dba94a703c96c56234016505ec5bb2424a29bb0881b837d2a7e0fc0c9a4", + "summary": "Testing using the Codenomicon TLS test suite discovered a flaw if the 'Server Key exchange message' is omitted from a TLS handshake in OpenSSL 0.9.8f and OpenSSL 0.9.8g. 
If a client connects to a malicious server with particular cipher suites, the server could cause the client to crash.", "affected_packages": [ { "package": { @@ -1050,9 +1050,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2008-0891", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2008-1672", "severities": [], - "reference_id": "CVE-2008-0891", + "reference_id": "CVE-2008-1672", "reference_type": "" }, { @@ -1066,8 +1066,8 @@ "weaknesses": [] }, { - "unique_content_id": "707840f8f10854ba4abf1409b159f35d", - "summary": "Testing using the Codenomicon TLS test suite discovered a flaw if the 'Server Key exchange message' is omitted from a TLS handshake in OpenSSL 0.9.8f and OpenSSL 0.9.8g. If a client connects to a malicious server with particular cipher suites, the server could cause the client to crash.", + "unique_content_id": "31901d67d2f1a8a6e0558d82580f7223d7f5d8986fa025f202bbc2f8bfbcf282", + "summary": "Testing using the Codenomicon TLS test suite discovered a flaw in the handling of server name extension data in OpenSSL 0.9.8f and OpenSSL 0.9.8g. 
If OpenSSL has been compiled using the non-default TLS server name extensions, a remote attacker could send a carefully crafted packet to a server application using OpenSSL and cause it to crash.", "affected_packages": [ { "package": { @@ -1084,9 +1084,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2008-1672", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2008-0891", "severities": [], - "reference_id": "CVE-2008-1672", + "reference_id": "CVE-2008-0891", "reference_type": "" }, { @@ -1100,7 +1100,7 @@ "weaknesses": [] }, { - "unique_content_id": "a52c691f587165864b42caa4be445576", + "unique_content_id": "7537c1d90dd6c6ff6c065a4a9b2ebd8f7060d69f1c2f4e8d1029c6cd17dbac0c", "summary": "The Google Security Team discovered several functions inside OpenSSL incorrectly checked the result after calling the EVP_VerifyFinal function, allowing a malformed signature to be treated as a good signature rather than as an error. This issue affected the signature checks on DSA and ECDSA keys used with SSL/TLS. One way to exploit this flaw would be for a remote attacker who is in control of a malicious server or who can use a 'man in the middle' attack to present a malformed SSL/TLS signature from a certificate chain to a vulnerable client, bypassing validation.", "affected_packages": [ { @@ -1134,7 +1134,7 @@ "weaknesses": [] }, { - "unique_content_id": "e872aef605740cacbb7547101151f4c7", + "unique_content_id": "42f716c07ad6ec9ae3eaece55884154a042ca5fe1ebc7abc0b6bd1e56aabe942", "summary": "Fix denial of service flaw due in the DTLS implementation. A remote attacker could use this flaw to cause a DTLS server to crash.", "affected_packages": [ { @@ -1168,7 +1168,7 @@ "weaknesses": [] }, { - "unique_content_id": "2b44645ffc6197aaeb99296cc87b3258", + "unique_content_id": "2ca10b0c5e2883828105f49783b0369798b610871a821fd020a9cd541a82539e", "summary": "The function CMS_verify() does not correctly handle an error condition involving malformed signed attributes. 
This will cause an invalid set of signed attributes to appear valid and content digests will not be checked.", "affected_packages": [ { @@ -1202,8 +1202,8 @@ "weaknesses": [] }, { - "unique_content_id": "4fffdc4369dd44a30fae0836347f91de", - "summary": "When a malformed ASN1 structure is received it's contents are freed up and zeroed and an error condition returned. On a small number of platforms where sizeof(long) < sizeof(void *) (for example WIN64) this can cause an invalid memory access later resulting in a crash when some invalid structures are read, for example RSA public keys.", + "unique_content_id": "ec18943f7b002b1a3999bfb8b71078f6c0cc14fadd2a226accc81b7e3c07b57d", + "summary": "The function ASN1_STRING_print_ex() when used to print a BMPString or UniversalString will crash with an invalid memory access if the encoded length of the string is illegal. Any OpenSSL application which prints out the contents of a certificate could be affected by this bug, including SSL servers, clients and S/MIME software.", "affected_packages": [ { "package": { @@ -1220,9 +1220,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-0789", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-0590", "severities": [], - "reference_id": "CVE-2009-0789", + "reference_id": "CVE-2009-0590", "reference_type": "" }, { @@ -1236,8 +1236,8 @@ "weaknesses": [] }, { - "unique_content_id": "6ec3760bac617981cc8cd2369115f10e", - "summary": "The function ASN1_STRING_print_ex() when used to print a BMPString or UniversalString will crash with an invalid memory access if the encoded length of the string is illegal. 
Any OpenSSL application which prints out the contents of a certificate could be affected by this bug, including SSL servers, clients and S/MIME software.", + "unique_content_id": "f414a498973b8e2d69129426ea6a5e3201efd1b8c5f9f6a4f8f3cba543701cb3", + "summary": "When a malformed ASN1 structure is received it's contents are freed up and zeroed and an error condition returned. On a small number of platforms where sizeof(long) < sizeof(void *) (for example WIN64) this can cause an invalid memory access later resulting in a crash when some invalid structures are read, for example RSA public keys.", "affected_packages": [ { "package": { @@ -1254,9 +1254,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-0590", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-0789", "severities": [], - "reference_id": "CVE-2009-0590", + "reference_id": "CVE-2009-0789", "reference_type": "" }, { @@ -1270,8 +1270,8 @@ "weaknesses": [] }, { - "unique_content_id": "868b6df2d8ffc22c9f9d83fd7da54401", - "summary": "Fix a denial of service flaw in the DTLS implementation. In dtls1_process_out_of_seq_message() the check if the current message is already buffered was missing. For every new message was memory allocated, allowing an attacker to perform an denial of service attack against a DTLS server by sending out of seq handshake messages until there is no memory left.", + "unique_content_id": "12e1eced51b649340678cf2d6e9b206e411c2fcd76c9a2d2f4c358b4ce480589", + "summary": "Fix a denial of service flaw in the DTLS implementation. Records are buffered if they arrive with a future epoch to be processed after finishing the corresponding handshake. 
There is currently no limitation to this buffer allowing an attacker to perform a DOS attack to a DTLS server by sending records with future epochs until there is no memory left.", "affected_packages": [ { "package": { @@ -1288,19 +1288,19 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1378", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1377", "severities": [], - "reference_id": "CVE-2009-1378", + "reference_id": "CVE-2009-1377", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/abda7c114791fa7fe95672ec7a66fc4733c40dbc", + "url": "https://github.com/openssl/openssl/commit/88b48dc68024dcc437da4296c9fb04419b0ccbe1", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://web.archive.org/web/20101120211136/http://rt.openssl.org/Ticket/Display.html?id=1931&user=guest&pass=guest", + "url": "https://web.archive.org/web/20120306065500/http://rt.openssl.org/Ticket/Display.html?id=1930&user=guest&pass=guest", "severities": [], "reference_id": "", "reference_type": "" @@ -1310,8 +1310,8 @@ "weaknesses": [] }, { - "unique_content_id": "9233bcc1b091ea2d0fe8d8a2820191f5", - "summary": "Use-after-free vulnerability in the dtls1_retrieve_buffered_fragment function could cause a client accessing a malicious DTLS server to crash.", + "unique_content_id": "bac66dcd2f0ad0469f600dbec41e0ec28219aab575fd5319a4f6d71675deda30", + "summary": "Fix a denial of service flaw in the DTLS implementation. In dtls1_process_out_of_seq_message() the check if the current message is already buffered was missing. 
For every new message was memory allocated, allowing an attacker to perform an denial of service attack against a DTLS server by sending out of seq handshake messages until there is no memory left.", "affected_packages": [ { "package": { @@ -1328,19 +1328,19 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1379", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1378", "severities": [], - "reference_id": "CVE-2009-1379", + "reference_id": "CVE-2009-1378", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/561cbe567846a376153bea7f1f2d061e78029c2d", + "url": "https://github.com/openssl/openssl/commit/abda7c114791fa7fe95672ec7a66fc4733c40dbc", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://web.archive.org/web/20100824233642/http://rt.openssl.org/Ticket/Display.html?id=1923&user=guest&pass=guest", + "url": "https://web.archive.org/web/20101120211136/http://rt.openssl.org/Ticket/Display.html?id=1931&user=guest&pass=guest", "severities": [], "reference_id": "", "reference_type": "" @@ -1350,8 +1350,8 @@ "weaknesses": [] }, { - "unique_content_id": "e250eb725e8ae34ba3933779594935f6", - "summary": "Fix a denial of service flaw in the DTLS implementation. Records are buffered if they arrive with a future epoch to be processed after finishing the corresponding handshake. 
There is currently no limitation to this buffer allowing an attacker to perform a DOS attack to a DTLS server by sending records with future epochs until there is no memory left.", + "unique_content_id": "bd12a0b86dcdd5a9a410597243f1700603dd5cd3ca6f0c40ab08aaeafd7d4edf", + "summary": "Use-after-free vulnerability in the dtls1_retrieve_buffered_fragment function could cause a client accessing a malicious DTLS server to crash.", "affected_packages": [ { "package": { @@ -1368,19 +1368,19 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1377", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1379", "severities": [], - "reference_id": "CVE-2009-1377", + "reference_id": "CVE-2009-1379", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/88b48dc68024dcc437da4296c9fb04419b0ccbe1", + "url": "https://github.com/openssl/openssl/commit/561cbe567846a376153bea7f1f2d061e78029c2d", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://web.archive.org/web/20120306065500/http://rt.openssl.org/Ticket/Display.html?id=1930&user=guest&pass=guest", + "url": "https://web.archive.org/web/20100824233642/http://rt.openssl.org/Ticket/Display.html?id=1923&user=guest&pass=guest", "severities": [], "reference_id": "", "reference_type": "" @@ -1390,7 +1390,7 @@ "weaknesses": [] }, { - "unique_content_id": "0097aaf34c70d34f665917931de0a380", + "unique_content_id": "b28a70e21f739116e19415a8ce53ecc95060ceacba347960a8292cc70a46762b", "summary": "Fix a NULL pointer dereference if a DTLS server recieved ChangeCipherSpec as first record. 
A remote attacker could use this flaw to cause a DTLS server to crash", "affected_packages": [ { @@ -1424,7 +1424,7 @@ "weaknesses": [] }, { - "unique_content_id": "3ede4a6de30467e840dadb6b1a2f94fc", + "unique_content_id": "e4c27c5b08884c79d2350038aa3ea44e57ac58d20ea4dcf682658288b7ec4268", "summary": "Implement RFC5746 to address vulnerabilities in SSL/TLS renegotiation.", "affected_packages": [ { @@ -1458,7 +1458,7 @@ "weaknesses": [] }, { - "unique_content_id": "91d6f4b44c2f61e0b1d98cbec9e4633d", + "unique_content_id": "61e80d10d33dde52fc3c7bc32f19fe3763bffef204240f578b490986e1ce7aff", "summary": "A memory leak in the zlib_stateful_finish function in crypto/comp/c_zlib.c allows remote attackers to cause a denial of service via vectors that trigger incorrect calls to the CRYPTO_cleanup_all_ex_data function.", "affected_packages": [ { @@ -1492,7 +1492,7 @@ "weaknesses": [] }, { - "unique_content_id": "f07be07de5fe8173dc2934d11c36c94d", + "unique_content_id": "08e65d1f3043871ffe0f802544cb08ac0822cf486e7cb9aebb04b301c46b986c", "summary": "A missing return value check flaw was discovered in OpenSSL, that could possibly cause OpenSSL to call a Kerberos library function with invalid arguments, resulting in a NULL pointer dereference crash in the MIT Kerberos library. In certain configurations, a remote attacker could use this flaw to crash a TLS/SSL server using OpenSSL by requesting Kerberos cipher suites during the TLS handshake", "affected_packages": [ { @@ -1526,7 +1526,7 @@ "weaknesses": [] }, { - "unique_content_id": "e4f35efada1573e600eeb3f197a9654e", + "unique_content_id": "850ee33c668bfb81f14d0412e4339312cfc05088304246c02b4ec3cf8274f1b1", "summary": "It was discovered that OpenSSL did not always check the return value of the bn_wexpand() function. 
An attacker able to trigger a memory allocation failure in that function could cause an application using the OpenSSL library to crash or, possibly, execute arbitrary code", "affected_packages": [ { @@ -1560,7 +1560,7 @@ "weaknesses": [] }, { - "unique_content_id": "94276d565fb0e1af8800da5df17f96be", + "unique_content_id": "bd7aef7bfdb58b2311644f5ef6b9fba6252b4ee4823061cce018f34f38e61ac6", "summary": "In TLS connections, certain incorrectly formatted records can cause an OpenSSL client or server to crash due to a read attempt at NULL.", "affected_packages": [ { @@ -1594,7 +1594,7 @@ "weaknesses": [] }, { - "unique_content_id": "bfee13b4a1f7df094ab9f172cf3556c9", + "unique_content_id": "806fa09aede3c5095c3bf55d4973cc6160bf7786a6efe3201815ceeb30cccf2b", "summary": "An invalid Return value check in pkey_rsa_verifyrecover was discovered. When verification recovery fails for RSA keys an uninitialised buffer with an undefined length is returned instead of an error code. This could lead to an information leak.", "affected_packages": [ { @@ -1628,7 +1628,7 @@ "weaknesses": [] }, { - "unique_content_id": "fdfe8fe89fb08b0cedb50a64445793f9", + "unique_content_id": "f7669cb060a5572fa05fd4e5dcbb589def9270038f39957489fe982c2b723713", "summary": "A flaw in the handling of CMS structures containing OriginatorInfo was found which could lead to a write to invalid memory address or double free. CMS support is disabled by default in OpenSSL 0.9.8 versions.", "affected_packages": [ { @@ -1674,7 +1674,7 @@ "weaknesses": [] }, { - "unique_content_id": "76c3ba83fe766ac2a084b0bd3de847f5", + "unique_content_id": "7b65ee41c2d48ae2fc3ab1c1935814347695def01407b8da246cab5018fd4f01", "summary": "A flaw in the OpenSSL TLS server extension code parsing which on affected servers can be exploited in a buffer overrun attack. Any OpenSSL based TLS server is vulnerable if it is multi-threaded and uses OpenSSL's internal caching mechanism. 
Servers that are multi-process and/or disable internal session caching are NOT affected.", "affected_packages": [ { @@ -1720,21 +1720,9 @@ "weaknesses": [] }, { - "unique_content_id": "316f2dc208adb956396af86e8d35c818", - "summary": "A flaw in the OpenSSL SSL/TLS server code where an old bug workaround allows malicious clients to modify the stored session cache ciphersuite. In some cases the ciphersuite can be downgraded to a weaker one on subsequent connections. This issue only affects OpenSSL based SSL/TLS server if it uses OpenSSL's internal caching mechanisms and the SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG flag (many applications enable this by using the SSL_OP_ALL option).", + "unique_content_id": "93fa5cf53d6cabf247c30a66821d9a5e07a1013f64a2417d5e26ac28581c4301", + "summary": "An error in OpenSSL's experimental J-PAKE implementation which could lead to successful validation by someone with no knowledge of the shared secret. The OpenSSL Team still consider the implementation of J-PAKE to be experimental and is not compiled by default.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8q", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p" - }, { "package": { "name": "openssl", @@ -1750,9 +1738,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-4180", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-4252", "severities": [], - "reference_id": "CVE-2010-4180", + "reference_id": "CVE-2010-4252", "reference_type": "" }, { @@ -1766,9 +1754,21 @@ "weaknesses": [] }, { - "unique_content_id": "a65500a311ab1c4e556fa47df1b487e1", - "summary": "An error in OpenSSL's experimental J-PAKE implementation which could lead to successful validation by someone with no knowledge of the shared secret. 
The OpenSSL Team still consider the implementation of J-PAKE to be experimental and is not compiled by default.", + "unique_content_id": "e0c32279e2afef8a7c959758dd603e340e8b3ae83744f2af395802b4d7152546", + "summary": "A flaw in the OpenSSL SSL/TLS server code where an old bug workaround allows malicious clients to modify the stored session cache ciphersuite. In some cases the ciphersuite can be downgraded to a weaker one on subsequent connections. This issue only affects OpenSSL based SSL/TLS server if it uses OpenSSL's internal caching mechanisms and the SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG flag (many applications enable this by using the SSL_OP_ALL option).", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8q", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p" + }, { "package": { "name": "openssl", @@ -1784,9 +1784,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-4252", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-4180", "severities": [], - "reference_id": "CVE-2010-4252", + "reference_id": "CVE-2010-4180", "reference_type": "" }, { @@ -1800,7 +1800,7 @@ "weaknesses": [] }, { - "unique_content_id": "4d2690fa788437a1517d397eabb14249", + "unique_content_id": "b9846e705257211137a5d75434ca61d87844c9fae7bc25a5a943b397a57a32c2", "summary": "A buffer over-read flaw was discovered in the way OpenSSL parsed the Certificate Status Request TLS extensions in ClientHello TLS handshake messages. 
A remote attacker could possibly use this flaw to crash an SSL server using the affected OpenSSL functionality.", "affected_packages": [ { @@ -1846,9 +1846,21 @@ "weaknesses": [] }, { - "unique_content_id": "220a4682b4ef1cc32a29898f3057b9b3", - "summary": "Under certain circumstances OpenSSL's internal certificate verification routines can incorrectly accept a CRL whose nextUpdate field is in the past. Applications are only affected by the CRL checking vulnerability if they enable OpenSSL's internal CRL checking which is off by default. Applications which use their own custom CRL checking (such as Apache) are not affected.", + "unique_content_id": "63385b83187d8305d4b3a99688f51116e1e99e77469a4de02e39611bbc58cf10", + "summary": "OpenSSL server code for ephemeral ECDH ciphersuites is not thread-safe, and furthermore can crash if a client violates the protocol by sending handshake messages in incorrect order. Only server-side applications that specifically support ephemeral ECDH ciphersuites are affected, and only if ephemeral ECDH ciphersuites are enabled in the configuration.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": null, + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" + }, { "package": { "name": "openssl", @@ -1864,9 +1876,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-3207", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-3210", "severities": [], - "reference_id": "CVE-2011-3207", + "reference_id": "CVE-2011-3210", "reference_type": "" }, { @@ -1880,8 +1892,8 @@ "weaknesses": [] }, { - "unique_content_id": "990e85544590d4e2411449cfbc182afd", - "summary": "OpenSSL server code for ephemeral ECDH ciphersuites is not thread-safe, and furthermore can crash if a client 
violates the protocol by sending handshake messages in incorrect order. Only server-side applications that specifically support ephemeral ECDH ciphersuites are affected, and only if ephemeral ECDH ciphersuites are enabled in the configuration.", + "unique_content_id": "ceda83e23c529430797c0b2affbe99cfbd68a5919628c3a8921070972ad425d3", + "summary": "Under certain circumstances OpenSSL's internal certificate verification routines can incorrectly accept a CRL whose nextUpdate field is in the past. Applications are only affected by the CRL checking vulnerability if they enable OpenSSL's internal CRL checking which is off by default. Applications which use their own custom CRL checking (such as Apache) are not affected.", "affected_packages": [ { "package": { @@ -1892,27 +1904,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": null, - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" - }, + "fixed_version": "1.0.0e", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d" + } + ], + "references": [ { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0e", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-3210", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-3207", "severities": [], - "reference_id": "CVE-2011-3210", + "reference_id": "CVE-2011-3207", "reference_type": "" }, { @@ -1926,8 +1926,8 @@ "weaknesses": [] }, { - "unique_content_id": "48361e01b38b28352705c300f7ee407b", - "summary": "Support for handshake restarts for server gated cryptograpy (SGC) can be used in a denial-of-service attack.", + "unique_content_id": "392d936885fcdae2fb2b4200be4c4dbe8cb7fef88164723777c37de37b84d573", + 
"summary": "OpenSSL was susceptable an extension of the Vaudenay padding oracle attack on CBC mode encryption which enables an efficient plaintext recovery attack against the OpenSSL implementation of DTLS by exploiting timing differences arising during decryption processing.", "affected_packages": [ { "package": { @@ -1956,9 +1956,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4619", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4108", "severities": [], - "reference_id": "CVE-2011-4619", + "reference_id": "CVE-2011-4108", "reference_type": "" }, { @@ -1972,8 +1972,8 @@ "weaknesses": [] }, { - "unique_content_id": "5459b1f4a775b3122cdb0ec3ad815b3d", - "summary": "If X509_V_FLAG_POLICY_CHECK is set in OpenSSL 0.9.8, then a policy check failure can lead to a double-free. The bug does not occur unless this flag is set. Users of OpenSSL 1.0.0 are not affected", + "unique_content_id": "525a3a5ff9914fd1388fdd071f143b794e6c642f2e45beb7d7d0bc49a78057a3", + "summary": "A flaw in the fix to CVE-2011-4108 can be exploited in a denial of service attack. 
Only DTLS applications are affected.", "affected_packages": [ { "package": { @@ -1984,19 +1984,31 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8s", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" + "fixed_version": "0.9.8t", + "affected_version_range": "vers:openssl/0.9.8s" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0g", + "affected_version_range": "vers:openssl/1.0.0f" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4109", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-0050", "severities": [], - "reference_id": "CVE-2011-4109", + "reference_id": "CVE-2012-0050", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20120104.txt", + "url": "https://www.openssl.org/news/secadv/20120118.txt", "severities": [], "reference_id": "", "reference_type": "" @@ -2006,8 +2018,8 @@ "weaknesses": [] }, { - "unique_content_id": "578281e8060ac1dc67b9d229e4b003ab", - "summary": "RFC 3779 data can be included in certificates, and if it is malformed, may trigger an assertion failure. This could be used in a denial-of-service attack. Builds of OpenSSL are only vulnerable if configured with \"enable-rfc3779\", which is not a default.", + "unique_content_id": "617f7a0525e9e761eae4eb9c93e690fabebd6717a3295b104064c694207f1897", + "summary": "If X509_V_FLAG_POLICY_CHECK is set in OpenSSL 0.9.8, then a policy check failure can lead to a double-free. The bug does not occur unless this flag is set. 
Users of OpenSSL 1.0.0 are not affected", "affected_packages": [ { "package": { @@ -2020,25 +2032,13 @@ }, "fixed_version": "0.9.8s", "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0f", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4577", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4109", "severities": [], - "reference_id": "CVE-2011-4577", + "reference_id": "CVE-2011-4109", "reference_type": "" }, { @@ -2052,8 +2052,8 @@ "weaknesses": [] }, { - "unique_content_id": "5af91d2aece046ccf3bc688d3dff09d5", - "summary": "A flaw in the fix to CVE-2011-4108 can be exploited in a denial of service attack. 
Only DTLS applications are affected.", + "unique_content_id": "985ab2093b4bed8444751c8a5f106add9b1f71fefbe400f56ff4a34d7fc29d00", + "summary": "OpenSSL failed to clear the bytes used as block cipher padding in SSL 3.0 records which could leak the contents of memory in some circumstances.", "affected_packages": [ { "package": { @@ -2064,8 +2064,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8t", - "affected_version_range": "vers:openssl/0.9.8s" + "fixed_version": "0.9.8s", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" }, { "package": { @@ -2076,19 +2076,19 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0g", - "affected_version_range": "vers:openssl/1.0.0f" + "fixed_version": "1.0.0f", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-0050", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4576", "severities": [], - "reference_id": "CVE-2012-0050", + "reference_id": "CVE-2011-4576", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20120118.txt", + "url": "https://www.openssl.org/news/secadv/20120104.txt", "severities": [], "reference_id": "", "reference_type": "" @@ -2098,8 +2098,8 @@ "weaknesses": [] }, { - "unique_content_id": "6a353734271d92996f12a08fde03f7bb", - "summary": "OpenSSL was susceptable an extension of the Vaudenay padding oracle attack on CBC mode encryption which enables an efficient plaintext recovery attack against the OpenSSL implementation of DTLS by exploiting timing differences arising during decryption processing.", + "unique_content_id": "a75d293b72e75c3618655c718811f59a039e176e1592a13e7fc6a723dd4003d6", + "summary": "RFC 3779 data can be included in certificates, and if it is malformed, may trigger an assertion failure. 
This could be used in a denial-of-service attack. Builds of OpenSSL are only vulnerable if configured with \"enable-rfc3779\", which is not a default.", "affected_packages": [ { "package": { @@ -2128,9 +2128,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4108", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4577", "severities": [], - "reference_id": "CVE-2011-4108", + "reference_id": "CVE-2011-4577", "reference_type": "" }, { @@ -2144,21 +2144,9 @@ "weaknesses": [] }, { - "unique_content_id": "85e39cd316fb40cbdc47d19d1f93fade", - "summary": "OpenSSL failed to clear the bytes used as block cipher padding in SSL 3.0 records which could leak the contents of memory in some circumstances.", + "unique_content_id": "b98fd56170c94c5fe71a1823c88ad50a789a513aa656f1cef217a11c83d645b7", + "summary": "A malicious TLS client can send an invalid set of GOST parameters which will cause the server to crash due to lack of error checking. This could be used in a denial-of-service attack. 
Only users of the OpenSSL GOST ENGINE are affected by this bug.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8s", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" - }, { "package": { "name": "openssl", @@ -2174,9 +2162,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4576", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-0027", "severities": [], - "reference_id": "CVE-2011-4576", + "reference_id": "CVE-2012-0027", "reference_type": "" }, { @@ -2190,9 +2178,21 @@ "weaknesses": [] }, { - "unique_content_id": "9c9b9e8b9a5f1a355656382f71722432", - "summary": "A malicious TLS client can send an invalid set of GOST parameters which will cause the server to crash due to lack of error checking. This could be used in a denial-of-service attack. 
Only users of the OpenSSL GOST ENGINE are affected by this bug.", + "unique_content_id": "c10f7480d6e0decea7f1d9b9884ea97b04025caa0c39bbc1338955d9ac46b48d", + "summary": "Support for handshake restarts for server gated cryptograpy (SGC) can be used in a denial-of-service attack.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8s", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" + }, { "package": { "name": "openssl", @@ -2208,9 +2208,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-0027", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-4619", "severities": [], - "reference_id": "CVE-2012-0027", + "reference_id": "CVE-2011-4619", "reference_type": "" }, { @@ -2224,7 +2224,7 @@ "weaknesses": [] }, { - "unique_content_id": "6744bf2a3fd6eba6b18d59fb648d443b", + "unique_content_id": "c60189dfbd7ddb73a1d2a470d59fa6fcb7bedad776a8d717a0cbca7d3b416095", "summary": "A weakness in the OpenSSL CMS and PKCS #7 code can be exploited using Bleichenbacher's attack on PKCS #1 v1.5 RSA padding also known as the million message attack (MMA). Only users of CMS, PKCS #7, or S/MIME decryption operations are affected, SSL/TLS applications are not affected by this issue.", "affected_packages": [ { @@ -2270,7 +2270,7 @@ "weaknesses": [] }, { - "unique_content_id": "c4e836c345751d38a3bff43c10e5a655", + "unique_content_id": "9d1e4715f7138b1a78fbf5251551b5d200ccd9ec52515b1b2939757df362997b", "summary": "Multiple numeric conversion errors, leading to a buffer overflow, were found in the way OpenSSL parsed ASN.1 (Abstract Syntax Notation One) data from BIO (OpenSSL's I/O abstraction) inputs. 
Specially-crafted DER (Distinguished Encoding Rules) encoded data read from a file or other BIO input could cause an application using the OpenSSL library to crash or, potentially, execute arbitrary code.", "affected_packages": [ { @@ -2328,7 +2328,7 @@ "weaknesses": [] }, { - "unique_content_id": "7451866670acf0bd4a5f0c9d74bdfb18", + "unique_content_id": "ea921fcdf273dfa8a452dab36604e137574b2bd9234e81b08a4885a267939e64", "summary": "It was discovered that the fix for CVE-2012-2110 released on 19 Apr 2012 was not sufficient to correct the issue for OpenSSL 0.9.8. This issue only affects OpenSSL 0.9.8v. OpenSSL 1.0.1a and 1.0.0i already contain a patch sufficient to correct CVE-2012-2110.", "affected_packages": [ { @@ -2362,7 +2362,7 @@ "weaknesses": [] }, { - "unique_content_id": "9a9efe32bb6fb903c9814b808b7f0206", + "unique_content_id": "0fd2dc9500a45c761c7a6ddadcaca6403b0dcaefd25ec7c8a9a2e4dba0211efe", "summary": "An integer underflow flaw, leading to a buffer over-read, was found in the way OpenSSL handled TLS 1.1, TLS 1.2, and DTLS (Datagram Transport Layer Security) application data record lengths when using a block cipher in CBC (cipher-block chaining) mode. 
A malicious TLS 1.1, TLS 1.2, or DTLS client or server could use this flaw to crash its connection peer.", "affected_packages": [ { @@ -2420,7 +2420,7 @@ "weaknesses": [] }, { - "unique_content_id": "d1003ac6fdcb1a2a4d7bca936e239b42", + "unique_content_id": "274bafa8474e5913afcb27cc6ffde809fb6f6ba505f13df3234f8ee946e218ee", "summary": "A weakness in the handling of CBC ciphersuites in SSL, TLS and DTLS which could lead to plaintext recovery by exploiting timing differences arising during MAC processing.", "affected_packages": [ { @@ -2478,33 +2478,9 @@ "weaknesses": [] }, { - "unique_content_id": "084bb9ad1da9dafc260f041cfdaf868e", - "summary": "A flaw in the OpenSSL handling of OCSP response verification can be exploited in a denial of service attack.", + "unique_content_id": "37fd821acfb83d5e24554010a0319b02b5c7c1c552d4dba2918bb1047836ed2c", + "summary": "A flaw in the OpenSSL handling of CBC ciphersuites in TLS 1.1 and TLS 1.2 on AES-NI supporting platforms can be exploited in a DoS attack.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8y", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0k", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j" - }, { "package": { "name": "openssl", @@ -2520,9 +2496,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-0166", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-2686", "severities": [], - "reference_id": "CVE-2013-0166", + "reference_id": "CVE-2012-2686", 
"reference_type": "" }, { @@ -2536,9 +2512,33 @@ "weaknesses": [] }, { - "unique_content_id": "9c755e2b9ac36e9d77e7aa63ca6b91e5", - "summary": "A flaw in the OpenSSL handling of CBC ciphersuites in TLS 1.1 and TLS 1.2 on AES-NI supporting platforms can be exploited in a DoS attack.", + "unique_content_id": "fcd18f8ddd7c4c680932ce9d21da72cd35ad71fe163ce5734f136cf4d1913002", + "summary": "A flaw in the OpenSSL handling of OCSP response verification can be exploited in a denial of service attack.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8y", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0k", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j" + }, { "package": { "name": "openssl", @@ -2554,9 +2554,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2012-2686", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2013-0166", "severities": [], - "reference_id": "CVE-2012-2686", + "reference_id": "CVE-2013-0166", "reference_type": "" }, { @@ -2570,7 +2570,7 @@ "weaknesses": [] }, { - "unique_content_id": "cd972700acea991417121019f009bac1", + "unique_content_id": "bc2e1522ce53f1d9658df6561b069413fd1a1e237b8d127da67a245315e1763f", "summary": "A flaw in DTLS handling can cause an application using OpenSSL and DTLS to crash. 
This is not a vulnerability for OpenSSL prior to 1.0.0.", "affected_packages": [ { @@ -2616,7 +2616,7 @@ "weaknesses": [] }, { - "unique_content_id": "bd7c16b098a35e13b1659e8c4934253d", + "unique_content_id": "7628f9cd3cb03285c9bfdbb9b7dc222f54c2e5ae9498bce55eb751f6dfce660d", "summary": "A flaw in OpenSSL can cause an application using OpenSSL to crash when using TLS version 1.2. This issue only affected OpenSSL 1.0.1 versions.", "affected_packages": [ { @@ -2650,7 +2650,7 @@ "weaknesses": [] }, { - "unique_content_id": "0e3a3a12e8060b9395fe7b48a7276377", + "unique_content_id": "98a0e5556bb1bf1ef2d84156a75154a169ffed9e73af5bedc7e7d76c7e2dda3c", "summary": "A carefully crafted invalid TLS handshake could crash OpenSSL with a NULL pointer exception. A malicious server could use this flaw to crash a connecting client. This issue only affected OpenSSL 1.0.1 versions.", "affected_packages": [ { @@ -2684,7 +2684,7 @@ "weaknesses": [] }, { - "unique_content_id": "5d7762928fe0665ff593f8b93f0f7c2d", + "unique_content_id": "84057cab1e58fea9c99a32830b1f9459f608e4a1842a5c621e56d7570923cad5", "summary": "Fix for the attack described in the paper \"Recovering OpenSSL ECDSA Nonces Using the FLUSH+RELOAD Cache Side-channel Attack\"", "affected_packages": [ { @@ -2754,7 +2754,7 @@ "weaknesses": [] }, { - "unique_content_id": "3b9f07c3f3fc9a3177b7cba6994626f2", + "unique_content_id": "757f04cde75470cb2bec8053f5fc874a82bae6b35945ec483df2e28eeb0cfc78", "summary": "A missing bounds check in the handling of the TLS heartbeat extension can be used to reveal up to 64kB of memory to a connected client or server (a.k.a. Heartbleed). 
This issue did not affect versions of OpenSSL prior to 1.0.1.", "affected_packages": [ { @@ -2788,7 +2788,7 @@ "weaknesses": [] }, { - "unique_content_id": "5c73df85af33b3649d0f8f5cf48465d3", + "unique_content_id": "23f38bdcf51ed382203722a20b7d4821569824f9d019c122bf958aa76dd50613", "summary": "A race condition in the ssl3_read_bytes function can allow remote attackers to inject data across sessions or cause a denial of service. This flaw only affects multithreaded applications using OpenSSL 1.0.0 and 1.0.1, where SSL_MODE_RELEASE_BUFFERS is enabled, which is not the default and not common.", "affected_packages": [ { @@ -2834,7 +2834,7 @@ "weaknesses": [] }, { - "unique_content_id": "ee4174c785ef4de123c8f5c8c4fbf9b2", + "unique_content_id": "156f765a217953dbd4da2ecb89c9f1998f67752ff9a12bbb575d396f7f8902a2", "summary": "A flaw in the do_ssl3_write function can allow remote attackers to cause a denial of service via a NULL pointer dereference. This flaw only affects OpenSSL 1.0.0 and 1.0.1 where SSL_MODE_RELEASE_BUFFERS is enabled, which is not the default and not common.", "affected_packages": [ { @@ -2880,7 +2880,7 @@ "weaknesses": [] }, { - "unique_content_id": "4e8f724565b6429137ea959defa72090", + "unique_content_id": "3af893757d5d17f3214542da1f1511d519cfbcda8bc5691a205aadb469f130f3", "summary": "OpenSSL TLS clients enabling anonymous ECDH ciphersuites are subject to a denial of service attack.", "affected_packages": [ { @@ -2938,8 +2938,8 @@ "weaknesses": [] }, { - "unique_content_id": "a73f61be805e75d9468e11afb3158d45", - "summary": "An attacker can force the use of weak keying material in OpenSSL SSL/TLS clients and servers. 
This can be exploited by a Man-in-the-middle (MITM) attack where the attacker can decrypt and modify traffic from the attacked client and server.", + "unique_content_id": "1220fb598061d81d0d92e10093d9cf1e9de722b48ce1e08513ff839410106623", + "summary": "By sending an invalid DTLS handshake to an OpenSSL DTLS client the code can be made to recurse eventually crashing in a DoS attack. Only applications using OpenSSL as a DTLS client are affected.", "affected_packages": [ { "package": { @@ -2980,9 +2980,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0224", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0221", "severities": [], - "reference_id": "CVE-2014-0224", + "reference_id": "CVE-2014-0221", "reference_type": "" }, { @@ -2996,8 +2996,8 @@ "weaknesses": [] }, { - "unique_content_id": "bc12de8c2221021ccb7c3659b08cd3f5", - "summary": "By sending an invalid DTLS handshake to an OpenSSL DTLS client the code can be made to recurse eventually crashing in a DoS attack. Only applications using OpenSSL as a DTLS client are affected.", + "unique_content_id": "8e650cb3afbf00bdf5312f07bb03de889b8709b53e66779f3ff6664d49f060cb", + "summary": "An attacker can force the use of weak keying material in OpenSSL SSL/TLS clients and servers. 
This can be exploited by a Man-in-the-middle (MITM) attack where the attacker can decrypt and modify traffic from the attacked client and server.", "affected_packages": [ { "package": { @@ -3038,9 +3038,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0221", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-0224", "severities": [], - "reference_id": "CVE-2014-0221", + "reference_id": "CVE-2014-0224", "reference_type": "" }, { @@ -3054,7 +3054,7 @@ "weaknesses": [] }, { - "unique_content_id": "da21b7edec2a01bd2495586e3e344a2c", + "unique_content_id": "e09b36d835f2209f6be06a5138c917c4210c32191bef1c9dc5a2faa1f8850e32", "summary": "A buffer overrun attack can be triggered by sending invalid DTLS fragments to an OpenSSL DTLS client or server. This is potentially exploitable to run arbitrary code on a vulnerable client or server. Only applications using OpenSSL as a DTLS client or server affected.", "affected_packages": [ { @@ -3112,33 +3112,9 @@ "weaknesses": [] }, { - "unique_content_id": "073034548d58e9674b4080cd0c36f8cb", - "summary": "A flaw in handling DTLS anonymous EC(DH) ciphersuites was found. OpenSSL DTLS clients enabling anonymous (EC)DH ciphersuites are subject to a denial of service attack. A malicious server can crash the client with a null pointer dereference (read) by specifying an anonymous (EC)DH ciphersuite and sending carefully crafted handshake messages.", + "unique_content_id": "173da4e79bb96a760519a18feb9667b22c727def897afd7cab56b2fc840ff141", + "summary": "A crash was found affecting SRP ciphersuites used in a Server Hello message. The issue affects OpenSSL clients and allows a malicious server to crash the client with a null pointer dereference (read) by specifying an SRP ciphersuite even though it was not properly negotiated with the client. 
This could lead to a Denial of Service.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8zb", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0n", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" - }, { "package": { "name": "openssl", @@ -3154,9 +3130,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3510", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-5139", "severities": [], - "reference_id": "CVE-2014-3510", + "reference_id": "CVE-2014-5139", "reference_type": "" }, { @@ -3170,8 +3146,8 @@ "weaknesses": [] }, { - "unique_content_id": "27f89a41dfab2654a12a2d701b68ad9c", - "summary": "A DTLS flaw leading to memory exhaustion was found. An attacker can force openssl to consume large amounts of memory whilst processing DTLS handshake messages. This could lead to a Denial of Service attack.", + "unique_content_id": "2947a778fbea64d8f99d370af3a8d0169602ff5adff88d86ccf57a09c3fb556c", + "summary": "A DTLS memory leak from zero-length fragments was found. By sending carefully crafted DTLS packets an attacker could cause OpenSSL to leak memory. 
This could lead to a Denial of Service attack.", "affected_packages": [ { "package": { @@ -3183,7 +3159,7 @@ "qualifiers": "" }, "fixed_version": "0.9.8zb", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" + "affected_version_range": "vers:openssl/0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" }, { "package": { @@ -3195,7 +3171,7 @@ "qualifiers": "" }, "fixed_version": "1.0.0n", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" + "affected_version_range": "vers:openssl/1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" }, { "package": { @@ -3212,9 +3188,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3506", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3507", "severities": [], - "reference_id": "CVE-2014-3506", + "reference_id": "CVE-2014-3507", "reference_type": "" }, { @@ -3228,8 +3204,8 @@ "weaknesses": [] }, { - "unique_content_id": "48ecff4dbadf3f99198fcfb4138048d8", - "summary": "A DTLS memory leak from zero-length fragments was found. By sending carefully crafted DTLS packets an attacker could cause OpenSSL to leak memory. This could lead to a Denial of Service attack.", + "unique_content_id": "52bc0907465cbad85c1cf82eecf18885bbbe24de573bda4cdb9f8367f269a783", + "summary": "A SRP buffer overrun was found. A malicious client or server can send invalid SRP parameters and overrun an internal buffer. 
Only applications which are explicitly set up for SRP use are affected.", "affected_packages": [ { "package": { @@ -3240,74 +3216,16 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8zb", - "affected_version_range": "vers:openssl/0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0n", - "affected_version_range": "vers:openssl/1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1i", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3507", - "severities": [], - "reference_id": "CVE-2014-3507", - "reference_type": "" - }, - { - "url": "https://www.openssl.org/news/secadv/20140806.txt", - "severities": [], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2014-08-06T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "4c289b7168ed3ac1dc649dd94e296ee2", - "summary": "A SRP buffer overrun was found. A malicious client or server can send invalid SRP parameters and overrun an internal buffer. 
Only applications which are explicitly set up for SRP use are affected.", - "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1i", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3512", - "severities": [], - "reference_id": "CVE-2014-3512", - "reference_type": "" + "fixed_version": "1.0.1i", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3512", + "severities": [], + "reference_id": "CVE-2014-3512", + "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20140806.txt", @@ -3320,7 +3238,7 @@ "weaknesses": [] }, { - "unique_content_id": "913ba8a6e88c02283428f89a6d24952b", + "unique_content_id": "73269c9023356431d683604381118286fb8aeddcd87d0151e488b7255fa89f2c", "summary": "A Double Free was found when processing DTLS packets. An attacker can force an error condition which causes openssl to crash whilst processing DTLS packets due to memory being freed twice. This could lead to a Denial of Service attack.", "affected_packages": [ { @@ -3378,7 +3296,7 @@ "weaknesses": [] }, { - "unique_content_id": "a5d66943f85ab01f18b1181d5dccceb3", + "unique_content_id": "99661f6b61c2befbf0a840ac395f67ae171810c041a0400837c4e202fff1c6ef", "summary": "A flaw in the OpenSSL SSL/TLS server code causes the server to negotiate TLS 1.0 instead of higher protocol versions when the ClientHello message is badly fragmented. 
This allows a man-in-the-middle attacker to force a downgrade to TLS 1.0 even if both the server and the client support a higher protocol version, by modifying the client's TLS records.", "affected_packages": [ { @@ -3412,9 +3330,33 @@ "weaknesses": [] }, { - "unique_content_id": "cca76ec7e4ca1da60dc37bfb7065a74d", - "summary": "A crash was found affecting SRP ciphersuites used in a Server Hello message. The issue affects OpenSSL clients and allows a malicious server to crash the client with a null pointer dereference (read) by specifying an SRP ciphersuite even though it was not properly negotiated with the client. This could lead to a Denial of Service.", + "unique_content_id": "c96902798094fef86133d6163da3a0ef8e16161941fb0c9987451c3856334da2", + "summary": "A DTLS flaw leading to memory exhaustion was found. An attacker can force openssl to consume large amounts of memory whilst processing DTLS handshake messages. This could lead to a Denial of Service attack.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8zb", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0n", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" + }, { "package": { "name": "openssl", @@ -3430,9 +3372,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-5139", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3506", "severities": [], - "reference_id": "CVE-2014-5139", + "reference_id": "CVE-2014-3506", 
"reference_type": "" }, { @@ -3446,7 +3388,7 @@ "weaknesses": [] }, { - "unique_content_id": "db4e7a865c812a2f137555357a4ea54a", + "unique_content_id": "d999097e03330b37701e1a362f85711c43272b4fc8606896b221ff2c09a6f5cb", "summary": "A race condition was found in ssl_parse_serverhello_tlsext. If a multithreaded client connects to a malicious server using a resumed session and the server sends an ec point format extension, it could write up to 255 bytes to freed memory.", "affected_packages": [ { @@ -3492,7 +3434,7 @@ "weaknesses": [] }, { - "unique_content_id": "e1e9269594db16c804a566e20f436cd2", + "unique_content_id": "e111b3c925ff4930bf9df47e3c68ad219bfc78029011f026f5db9dfcb3623cba", "summary": "A flaw in OBJ_obj2txt may cause pretty printing functions such as X509_name_oneline, X509_name_print_ex, to leak some information from the stack. Applications may be affected if they echo pretty printing output to the attacker. OpenSSL SSL/TLS clients and servers themselves are not affected.", "affected_packages": [ { @@ -3550,8 +3492,8 @@ "weaknesses": [] }, { - "unique_content_id": "3dbf91d5443471c2da6cf221eddf9898", - "summary": "A flaw in the DTLS SRTP extension parsing code allows an attacker, who sends a carefully crafted handshake message, to cause OpenSSL to fail to free up to 64k of memory causing a memory leak. This could be exploited in a Denial Of Service attack. This issue affects OpenSSL 1.0.1 server implementations for both SSL/TLS and DTLS regardless of whether SRTP is used or configured. Implementations of OpenSSL that have been compiled with OPENSSL_NO_SRTP defined are not affected.", + "unique_content_id": "f3294bb2b90c0dac71eb21010721728aa9fbaf64cd7b1aff3bbe97099e5db16e", + "summary": "A flaw in handling DTLS anonymous EC(DH) ciphersuites was found. OpenSSL DTLS clients enabling anonymous (EC)DH ciphersuites are subject to a denial of service attack. 
A malicious server can crash the client with a null pointer dereference (read) by specifying an anonymous (EC)DH ciphersuite and sending carefully crafted handshake messages.", "affected_packages": [ { "package": { @@ -3562,36 +3504,54 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1j", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i" + "fixed_version": "0.9.8zb", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0n", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1i", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3513", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3510", "severities": [], - "reference_id": "CVE-2014-3513", + "reference_id": "CVE-2014-3510", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20141015.txt", - "severities": [ - { - "value": "High", - "system": "generic_textual", - "scoring_elements": "" - } - ], + "url": "https://www.openssl.org/news/secadv/20140806.txt", + "severities": [], "reference_id": "", "reference_type": "" } ], - "date_published": "2014-10-15T00:00:00+00:00", + "date_published": "2014-08-06T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "63bf7bb20dcd1c7a3214c025ea53c1da", - 
"summary": "When an OpenSSL SSL/TLS/DTLS server receives a session ticket the integrity of that ticket is first verified. In the event of a session ticket integrity check failing, OpenSSL will fail to free memory causing a memory leak. By sending a large number of invalid session tickets an attacker could exploit this issue in a Denial Of Service attack.", + "unique_content_id": "0441ee6483168f14e3eb89495aa9144a146935020e60b4fafdd5de9dc52fbb05", + "summary": "When OpenSSL is configured with \"no-ssl3\" as a build option, servers could accept and complete a SSL 3.0 handshake, and clients could be configured to send them.", "affected_packages": [ { "package": { @@ -3603,7 +3563,7 @@ "qualifiers": "" }, "fixed_version": "0.9.8zc", - "affected_version_range": "vers:openssl/0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb" + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb" }, { "package": { @@ -3632,16 +3592,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3567", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3568", "severities": [], - "reference_id": "CVE-2014-3567", + "reference_id": "CVE-2014-3568", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20141015.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -3654,7 +3614,7 @@ "weaknesses": [] }, { - "unique_content_id": "88aac050ad73754e929805f2ab5e64e7", + "unique_content_id": "4ee23c143c0a01cd7035e1646adaf2222725ad2c96447ffc524eb79d1ac532dd", "summary": "OpenSSL has added support for TLS_FALLBACK_SCSV to allow applications to block the ability for a MITM attacker to force a protocol downgrade. 
Some client applications (such as browsers) will reconnect using a downgraded protocol to work around interoperability bugs in older servers. This could be exploited by an active man-in-the-middle to downgrade connections to SSL 3.0 even if both sides of the connection support higher protocols. SSL 3.0 contains a number of weaknesses including POODLE (CVE-2014-3566). See also https://tools.ietf.org/html/draft-ietf-tls-downgrade-scsv-00 and https://www.openssl.org/~bodo/ssl-poodle.pdf", "affected_packages": [ { @@ -3699,8 +3659,48 @@ "weaknesses": [] }, { - "unique_content_id": "ba13f3aea682e9e1c5fab3672da07088", - "summary": "When OpenSSL is configured with \"no-ssl3\" as a build option, servers could accept and complete a SSL 3.0 handshake, and clients could be configured to send them.", + "unique_content_id": "c54c6fed589f1ca8024f1917126aae2983baa39871610960f380e5340ce50252", + "summary": "A flaw in the DTLS SRTP extension parsing code allows an attacker, who sends a carefully crafted handshake message, to cause OpenSSL to fail to free up to 64k of memory causing a memory leak. This could be exploited in a Denial Of Service attack. This issue affects OpenSSL 1.0.1 server implementations for both SSL/TLS and DTLS regardless of whether SRTP is used or configured. 
Implementations of OpenSSL that have been compiled with OPENSSL_NO_SRTP defined are not affected.", + "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1j", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3513", + "severities": [], + "reference_id": "CVE-2014-3513", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20141015.txt", + "severities": [ + { + "value": "High", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2014-10-15T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "dcced98f8929707dec2045556ad27a5f407f0a6da5b0de6bb9cb0bf6c4eba16c", + "summary": "When an OpenSSL SSL/TLS/DTLS server receives a session ticket the integrity of that ticket is first verified. In the event of a session ticket integrity check failing, OpenSSL will fail to free memory causing a memory leak. 
By sending a large number of invalid session tickets an attacker could exploit this issue in a Denial Of Service attack.", "affected_packages": [ { "package": { @@ -3712,7 +3712,7 @@ "qualifiers": "" }, "fixed_version": "0.9.8zc", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb" + "affected_version_range": "vers:openssl/0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb" }, { "package": { @@ -3741,16 +3741,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3568", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3567", "severities": [], - "reference_id": "CVE-2014-3568", + "reference_id": "CVE-2014-3567", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20141015.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -3763,7 +3763,7 @@ "weaknesses": [] }, { - "unique_content_id": "d615f85fc740c95b6b98e150b56d1ae3", + "unique_content_id": "753342c985991295f308ceffe0455636ac19375dc81d8e311fa5cf1d23473dd5", "summary": "When openssl is built with the no-ssl3 option and a SSL v3 ClientHello is received the ssl method would be set to NULL which could later result in a NULL pointer dereference.", "affected_packages": [ { @@ -3827,7 +3827,7 @@ "weaknesses": [] }, { - "unique_content_id": "16d87492de289b2cbfd7ba3ef7e106fc", + "unique_content_id": "38ffe37c3e05fc10c74d621c2a23b78e2b3238c88a8cec376705a04e80131162", "summary": "An OpenSSL client will accept a handshake using an ephemeral ECDH ciphersuite using an ECDSA certificate if the server key exchange message is omitted. 
This effectively removes forward secrecy from the ciphersuite.", "affected_packages": [ { @@ -3891,7 +3891,7 @@ "weaknesses": [] }, { - "unique_content_id": "3f5c428c988da21fcf75625d7764c31e", + "unique_content_id": "a7261d54aab29faf70f12bbfbdd3f3e78cf2beebfeb915dbd7a29714a8955fed", "summary": "OpenSSL accepts several non-DER-variations of certificate signature algorithm and signature encodings. OpenSSL also does not enforce a match between the signature algorithm between the signed and unsigned portions of the certificate. By modifying the contents of the signature algorithm or the encoding of the signature, it is possible to change the certificate's fingerprint. This does not allow an attacker to forge certificates, and does not affect certificate verification or OpenSSL servers/clients in any other way. It also does not affect common revocation mechanisms. Only custom applications that rely on the uniqueness of the fingerprint (e.g. certificate blacklists) may be affected.", "affected_packages": [ { @@ -3955,7 +3955,7 @@ "weaknesses": [] }, { - "unique_content_id": "b80715d645997362b4be69a335b46cd5", + "unique_content_id": "bf213d08073d8ca6d471398fe7b23b4ee5111732d9f7976e6ed6740944653e2d", "summary": "A carefully crafted DTLS message can cause a segmentation fault in OpenSSL due to a NULL pointer dereference. This could lead to a Denial Of Service attack.", "affected_packages": [ { @@ -4019,7 +4019,7 @@ "weaknesses": [] }, { - "unique_content_id": "ecbce64df0cdd160db419c6db1cd9dc4", + "unique_content_id": "60f12268a60e39fd28c928e89af1f4038210aff7ad1f1fd748b8968ca65dfbdd", "summary": "An OpenSSL client will accept the use of an RSA temporary key in a non-export RSA key exchange ciphersuite. 
A server could present a weak temporary key and downgrade the security of the session.", "affected_packages": [ { @@ -4083,9 +4083,21 @@ "weaknesses": [] }, { - "unique_content_id": "14a72a501af8865388558895f94f4719", - "summary": "An OpenSSL server will accept a DH certificate for client authentication without the certificate verify message. This effectively allows a client to authenticate without the use of a private key. This only affects servers which trust a client certificate authority which issues certificates containing DH keys: these are extremely rare and hardly ever encountered.", + "unique_content_id": "4247eafd0646ef018955aac7a30d2c023512a5b5f3a1803427473090a57766e5", + "summary": "Bignum squaring (BN_sqr) may produce incorrect results on some platforms, including x86_64. This bug occurs at random with a very low probability, and is not known to be exploitable in any way, though its exact impact is difficult to determine. The following has been determined: *) The probability of BN_sqr producing an incorrect result at random is very low: 1/2^64 on the single affected 32-bit platform (MIPS) and 1/2^128 on affected 64-bit platforms. *) On most platforms, RSA follows a different code path and RSA operations are not affected at all. For the remaining platforms (e.g. OpenSSL built without assembly support), pre-existing countermeasures thwart bug attacks [1]. *) Static ECDH is theoretically affected: it is possible to construct elliptic curve points that would falsely appear to be on the given curve. However, there is no known computationally feasible way to construct such points with low order, and so the security of static ECDH private keys is believed to be unaffected. *) Other routines known to be theoretically affected are modular exponentiation, primality testing, DSA, RSA blinding, JPAKE and SRP. 
No exploits are known and straightforward bug attacks fail - either the attacker cannot control when the bug triggers, or no private key material is involved.", "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "0.9.8zd", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc" + }, { "package": { "name": "openssl", @@ -4113,9 +4125,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0205", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3570", "severities": [], - "reference_id": "CVE-2015-0205", + "reference_id": "CVE-2014-3570", "reference_type": "" }, { @@ -4135,8 +4147,8 @@ "weaknesses": [] }, { - "unique_content_id": "ae55e9f4f7210581875a2de83cc058ec", - "summary": "A memory leak can occur in the dtls1_buffer_record function under certain conditions. In particular this could occur if an attacker sent repeated DTLS records with the same sequence number but for the next epoch. The memory leak could be exploited by an attacker in a Denial of Service attack through memory exhaustion.", + "unique_content_id": "f5bac2344614e13386f702b70ba31694e5db10133151e5372c410e6fbff702ca", + "summary": "An OpenSSL server will accept a DH certificate for client authentication without the certificate verify message. This effectively allows a client to authenticate without the use of a private key. 
This only affects servers which trust a client certificate authority which issues certificates containing DH keys: these are extremely rare and hardly ever encountered.", "affected_packages": [ { "package": { @@ -4165,16 +4177,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0206", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0205", "severities": [], - "reference_id": "CVE-2015-0206", + "reference_id": "CVE-2015-0205", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150108.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -4187,21 +4199,9 @@ "weaknesses": [] }, { - "unique_content_id": "f2f9de1344eacac2f17f6642b9655651", - "summary": "Bignum squaring (BN_sqr) may produce incorrect results on some platforms, including x86_64. This bug occurs at random with a very low probability, and is not known to be exploitable in any way, though its exact impact is difficult to determine. The following has been determined: *) The probability of BN_sqr producing an incorrect result at random is very low: 1/2^64 on the single affected 32-bit platform (MIPS) and 1/2^128 on affected 64-bit platforms. *) On most platforms, RSA follows a different code path and RSA operations are not affected at all. For the remaining platforms (e.g. OpenSSL built without assembly support), pre-existing countermeasures thwart bug attacks [1]. *) Static ECDH is theoretically affected: it is possible to construct elliptic curve points that would falsely appear to be on the given curve. However, there is no known computationally feasible way to construct such points with low order, and so the security of static ECDH private keys is believed to be unaffected. *) Other routines known to be theoretically affected are modular exponentiation, primality testing, DSA, RSA blinding, JPAKE and SRP. 
No exploits are known and straightforward bug attacks fail - either the attacker cannot control when the bug triggers, or no private key material is involved.", + "unique_content_id": "f94fa5bd638308939b95d4d520dd8e57678c3f4709d63229a75fe3868c15446d", + "summary": "A memory leak can occur in the dtls1_buffer_record function under certain conditions. In particular this could occur if an attacker sent repeated DTLS records with the same sequence number but for the next epoch. The memory leak could be exploited by an attacker in a Denial of Service attack through memory exhaustion.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8zd", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc" - }, { "package": { "name": "openssl", @@ -4229,16 +4229,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-3570", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0206", "severities": [], - "reference_id": "CVE-2014-3570", + "reference_id": "CVE-2015-0206", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150108.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -4251,7 +4251,7 @@ "weaknesses": [] }, { - "unique_content_id": "fde824bdb24f286066693f15a53a9c11", + "unique_content_id": "751dcb76349de0d4bd85b5a27c52b97bf0f472fc1bd4b3a334c67afd762a0bf1", "summary": "X509_to_X509_REQ NULL pointer deref. The function X509_to_X509_REQ will crash with a NULL pointer dereference if the certificate key is invalid. 
This function is rarely used in practice.", "affected_packages": [ { @@ -4327,7 +4327,7 @@ "weaknesses": [] }, { - "unique_content_id": "92852e9f71e2d4220063d01c7e871d0f", + "unique_content_id": "ae10e26137b18ce0f074a9e88ad800799cfa131e4c9075c49e5cd736bd4ae7ef", "summary": "Under certain conditions an OpenSSL 1.0.2 client can complete a handshake with an unseeded PRNG. If the handshake succeeds then the client random that has been used will have been generated from a PRNG with insufficient entropy and therefore the output may be predictable.", "affected_packages": [ { @@ -4367,8 +4367,8 @@ "weaknesses": [] }, { - "unique_content_id": "037837042ea4921162841a8a572dedb7", - "summary": "A vulnerability existed in previous versions of OpenSSL related to the processing of base64 encoded data. Any code path that reads base64 data from an untrusted source could be affected (such as the PEM processing routines). Maliciously crafted base 64 data could trigger a segmenation fault or memory corruption.", + "unique_content_id": "23b9fdf2b9a73946210388721d4df0de3a020ac58b1e6669c3696b33a602ec98", + "summary": "Multiblock corrupted pointer. OpenSSL 1.0.2 introduced the \"multiblock\" performance improvement. This feature only applies on 64 bit x86 architecture platforms that support AES NI instructions. A defect in the implementation of \"multiblock\" can cause OpenSSL's internal write buffer to become incorrectly set to NULL when using non-blocking IO. Typically, when the user application is using a socket BIO for writing, this will only result in a failed connection. 
However if some other BIO is used then it is likely that a segmentation fault will be triggered, thus enabling a potential DoS attack.", "affected_packages": [ { "package": { @@ -4379,20 +4379,48 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8za", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y" + "fixed_version": "1.0.2a", + "affected_version_range": "vers:openssl/1.0.2" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0290", + "severities": [], + "reference_id": "CVE-2015-0290", + "reference_type": "" }, { - "package": { - "name": "openssl", + "url": "https://www.openssl.org/news/secadv/20150319.txt", + "severities": [ + { + "value": "Moderate", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2015-03-19T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "3964ca62faf5fd2df7ebf079fc420e480621026d27cc10f9de31e2738a05936c", + "summary": "ASN.1 structure reuse memory corruption. Reusing a structure in ASN.1 parsing may allow an attacker to cause memory corruption via an invalid write. 
Such reuse is and has been strongly discouraged and is believed to be rare.", + "affected_packages": [ + { + "package": { + "name": "openssl", "type": "openssl", "subpath": "", "version": "", "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0m", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l" + "fixed_version": "0.9.8zf", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" }, { "package": { @@ -4403,37 +4431,21 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1h", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0292", - "severities": [], - "reference_id": "CVE-2015-0292", - "reference_type": "" + "fixed_version": "1.0.0r", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" }, { - "url": "https://www.openssl.org/news/secadv/20150319.txt", - "severities": [ - { - "value": "Moderate", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2015-03-19T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "5d5cb3ddc2d7d372e96fc9e7eb0e6172", - "summary": "Segmentation fault for invalid PSS parameters. The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and invalid parameters. 
Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", - "affected_packages": [ + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1m", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" + }, { "package": { "name": "openssl", @@ -4449,9 +4461,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0208", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0287", "severities": [], - "reference_id": "CVE-2015-0208", + "reference_id": "CVE-2015-0287", "reference_type": "" }, { @@ -4471,7 +4483,7 @@ "weaknesses": [] }, { - "unique_content_id": "66636a0c48ff0f39676cc43ff2fad975", + "unique_content_id": "45236a4d12fbe78a8b2d6a428b53a890bddcc1dedee31cb6d41b20af54e9bbb3", "summary": "Segmentation fault in DTLSv1_listen. A defect in the implementation of DTLSv1_listen means that state is preserved in the SSL object from one invocation to the next that can lead to a segmentation fault. Errors processing the initial ClientHello can trigger this scenario. An example of such an error could be that a DTLS1.0 only client is attempting to connect to a DTLS1.2 only server.", "affected_packages": [ { @@ -4511,8 +4523,8 @@ "weaknesses": [] }, { - "unique_content_id": "6b326dde327d1535193796cfd337f305", - "summary": "DoS via reachable assert in SSLv2 servers. 
A malicious client can trigger an OPENSSL_assert in servers that both support SSLv2 and enable export cipher suites by sending a specially crafted SSLv2 CLIENT-MASTER-KEY message.", + "unique_content_id": "66bbf7b524be1160d1805c966d32418f2dd42b204296d6b885939dafb1ce52f5", + "summary": "PKCS#7 NULL pointer dereference. The PKCS#7 parsing code does not handle missing outer ContentInfo correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. Applications that verify PKCS#7 signatures, decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. OpenSSL clients and servers are not affected.", "affected_packages": [ { "package": { @@ -4565,9 +4577,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0293", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0289", "severities": [], - "reference_id": "CVE-2015-0293", + "reference_id": "CVE-2015-0289", "reference_type": "" }, { @@ -4587,45 +4599,9 @@ "weaknesses": [] }, { - "unique_content_id": "7f14c539a7b1d7b62b178e81a164ca57", - "summary": "ASN.1 structure reuse memory corruption. Reusing a structure in ASN.1 parsing may allow an attacker to cause memory corruption via an invalid write. Such reuse is and has been strongly discouraged and is believed to be rare.", + "unique_content_id": "6bdb68d814ff5f69711b93446eb25ddf133d6fbb35bab358bb97b3c423bb5811", + "summary": "ClientHello sigalgs DoS. If a client connects to an OpenSSL 1.0.2 server and renegotiates with an invalid signature algorithms extension a NULL pointer dereference will occur. 
This can be exploited in a DoS attack against the server.", "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "0.9.8zf", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0r", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1m", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" - }, { "package": { "name": "openssl", @@ -4641,16 +4617,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0287", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0291", "severities": [], - "reference_id": "CVE-2015-0287", + "reference_id": "CVE-2015-0291", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150319.txt", "severities": [ { - "value": "Moderate", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -4663,8 +4639,8 @@ "weaknesses": [] }, { - "unique_content_id": "87c491358b43983d41be3e34f577787f", - "summary": "Empty CKE with client auth and DHE. If client auth is used then a server can seg fault in the event of a DHE ciphersuite being selected and a zero length ClientKeyExchange message being sent by the client. 
This could be exploited in a DoS attack.", + "unique_content_id": "774c0aaa394ae3ac59c32105e791fec7c71c602f342a042afd485ed819983fc6", + "summary": "Segmentation fault for invalid PSS parameters. The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and invalid parameters. Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", "affected_packages": [ { "package": { @@ -4681,9 +4657,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1787", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0208", "severities": [], - "reference_id": "CVE-2015-1787", + "reference_id": "CVE-2015-0208", "reference_type": "" }, { @@ -4703,8 +4679,8 @@ "weaknesses": [] }, { - "unique_content_id": "8c8ab1d205efac4fa9eeb6888a73d02b", - "summary": "Multiblock corrupted pointer. OpenSSL 1.0.2 introduced the \"multiblock\" performance improvement. This feature only applies on 64 bit x86 architecture platforms that support AES NI instructions. A defect in the implementation of \"multiblock\" can cause OpenSSL's internal write buffer to become incorrectly set to NULL when using non-blocking IO. Typically, when the user application is using a socket BIO for writing, this will only result in a failed connection. However if some other BIO is used then it is likely that a segmentation fault will be triggered, thus enabling a potential DoS attack.", + "unique_content_id": "9928809c0f0a04e7ae6a89ccefdce3eb83e34e047f3470f7778b42182c3b0a3e", + "summary": "A vulnerability existed in previous versions of OpenSSL related to the processing of base64 encoded data. 
Any code path that reads base64 data from an untrusted source could be affected (such as the PEM processing routines). Maliciously crafted base 64 data could trigger a segmenation fault or memory corruption.", "affected_packages": [ { "package": { @@ -4715,37 +4691,21 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2a", - "affected_version_range": "vers:openssl/1.0.2" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0290", - "severities": [], - "reference_id": "CVE-2015-0290", - "reference_type": "" + "fixed_version": "0.9.8za", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y" }, { - "url": "https://www.openssl.org/news/secadv/20150319.txt", - "severities": [ - { - "value": "Moderate", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2015-03-19T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "9c790e8e82381b71bd62ae5a2403aa43", - "summary": "ClientHello sigalgs DoS. If a client connects to an OpenSSL 1.0.2 server and renegotiates with an invalid signature algorithms extension a NULL pointer dereference will occur. 
This can be exploited in a DoS attack against the server.", - "affected_packages": [ + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.0m", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l" + }, { "package": { "name": "openssl", @@ -4755,22 +4715,22 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2a", - "affected_version_range": "vers:openssl/1.0.2" + "fixed_version": "1.0.1h", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0291", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0292", "severities": [], - "reference_id": "CVE-2015-0291", + "reference_id": "CVE-2015-0292", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20150319.txt", "severities": [ { - "value": "High", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -4783,8 +4743,8 @@ "weaknesses": [] }, { - "unique_content_id": "a6996bfe711e793b22ceb3d47c975099", - "summary": "PKCS#7 NULL pointer dereference. The PKCS#7 parsing code does not handle missing outer ContentInfo correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. Applications that verify PKCS#7 signatures, decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. OpenSSL clients and servers are not affected.", + "unique_content_id": "d86068f891546989943214dbe20bceca4d29250299395048df1666ebef7ede03", + "summary": "DoS via reachable assert in SSLv2 servers. 
A malicious client can trigger an OPENSSL_assert in servers that both support SSLv2 and enable export cipher suites by sending a specially crafted SSLv2 CLIENT-MASTER-KEY message.", "affected_packages": [ { "package": { @@ -4837,9 +4797,49 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0289", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0293", "severities": [], - "reference_id": "CVE-2015-0289", + "reference_id": "CVE-2015-0293", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20150319.txt", + "severities": [ + { + "value": "Moderate", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2015-03-19T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "e2e31fceb4d827820c9a6c2c0144827a16d464ad33bd6139cf5e5c7389864c4c", + "summary": "Empty CKE with client auth and DHE. If client auth is used then a server can seg fault in the event of a DHE ciphersuite being selected and a zero length ClientKeyExchange message being sent by the client. This could be exploited in a DoS attack.", + "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2a", + "affected_version_range": "vers:openssl/1.0.2" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1787", + "severities": [], + "reference_id": "CVE-2015-1787", "reference_type": "" }, { @@ -4859,7 +4859,7 @@ "weaknesses": [] }, { - "unique_content_id": "b91a75f67326a148c90e6ad45ba11839", + "unique_content_id": "f53c9570c9efdac69f3e8300699223b0497562e4c9fb9398fdf2f29ba05efb53", "summary": "Use After Free following d2i_ECPrivatekey error. A malformed EC private key file consumed via the d2i_ECPrivateKey function could cause a use after free condition. 
This, in turn, could cause a double free in several private key parsing functions (such as d2i_PrivateKey or EVP_PKCS82PKEY) and could lead to a DoS attack or memory corruption for applications that receive EC private keys from untrusted sources. This scenario is considered rare.", "affected_packages": [ { @@ -4935,7 +4935,7 @@ "weaknesses": [] }, { - "unique_content_id": "d0946aba30cf839fdbc468685b6bd683", + "unique_content_id": "fcff8a052ccf49c48dbe7f8d5a88a485de1a213799585647c2124b98ae5ccb52", "summary": "Segmentation fault in ASN1_TYPE_cmp. The function ASN1_TYPE_cmp will crash with an invalid read if an attempt is made to compare ASN.1 boolean types. Since ASN1_TYPE_cmp is used to check certificate signature algorithm consistency this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", "affected_packages": [ { @@ -5011,7 +5011,7 @@ "weaknesses": [] }, { - "unique_content_id": "c9cffc6fc71a28da39de00bca06f0ce3", + "unique_content_id": "2ad006bcecf434794b6cafb90c3e60eda8f3465baf0d60adf2eb0547f6075427", "summary": "If a NewSessionTicket is received by a multi-threaded client when attempting to reuse a previous ticket then a race condition can occur potentially leading to a double free of the ticket data.", "affected_packages": [ { @@ -5087,8 +5087,8 @@ "weaknesses": [] }, { - "unique_content_id": "154f6f04f63ee6fba925180ed9e059c1", - "summary": "X509_cmp_time does not properly check the length of the ASN1_TIME string and can read a few bytes out of bounds. In addition, X509_cmp_time accepts an arbitrary number of fractional seconds in the time string. An attacker can use this to craft malformed certificates and CRLs of various sizes and potentially cause a segmentation fault, resulting in a DoS on applications that verify certificates or CRLs. 
TLS clients that verify CRLs are affected. TLS clients and servers with client authentication enabled may be affected if they use custom verification callbacks.", + "unique_content_id": "0d6ca333ae5301c543aa3d5fee659526e6e7df19d6cd23503b080d44f393be29", + "summary": "The PKCS#7 parsing code does not handle missing inner EncryptedContent correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. Applications that decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. OpenSSL clients and servers are not affected.", "affected_packages": [ { "package": { @@ -5141,9 +5141,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1789", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1790", "severities": [], - "reference_id": "CVE-2015-1789", + "reference_id": "CVE-2015-1790", "reference_type": "" }, { @@ -5163,8 +5163,8 @@ "weaknesses": [] }, { - "unique_content_id": "1d42619f9d572e6c6f831da1d4b5347c", - "summary": "This vulnerability does not affect current versions of OpenSSL. It existed in previous OpenSSL versions and was fixed in June 2014. If a DTLS peer receives application data between the ChangeCipherSpec and Finished messages, buffering of such data may cause an invalid free, resulting in a segmentation fault or potentially, memory corruption.", + "unique_content_id": "6693aa99959f40abe75da63ee98844b32d6c80ee49cd880d7211f82f39bff9bf", + "summary": "X509_cmp_time does not properly check the length of the ASN1_TIME string and can read a few bytes out of bounds. In addition, X509_cmp_time accepts an arbitrary number of fractional seconds in the time string. An attacker can use this to craft malformed certificates and CRLs of various sizes and potentially cause a segmentation fault, resulting in a DoS on applications that verify certificates or CRLs. TLS clients that verify CRLs are affected. 
TLS clients and servers with client authentication enabled may be affected if they use custom verification callbacks.", "affected_packages": [ { "package": { @@ -5175,8 +5175,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8za", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y" + "fixed_version": "0.9.8zg", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze|0.9.8zf" }, { "package": { @@ -5187,8 +5187,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0m", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l" + "fixed_version": "1.0.0s", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r" }, { "package": { @@ -5199,15 +5199,27 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1h", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g" + "fixed_version": "1.0.1n", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2b", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-8176", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1789", "severities": [], - "reference_id": "CVE-2014-8176", 
+ "reference_id": "CVE-2015-1789", "reference_type": "" }, { @@ -5227,8 +5239,8 @@ "weaknesses": [] }, { - "unique_content_id": "2b988a60b7d38da17ad12c1d84455a70", - "summary": "The PKCS#7 parsing code does not handle missing inner EncryptedContent correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. Applications that decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. OpenSSL clients and servers are not affected.", + "unique_content_id": "be2ba7ab66a7f53457702397f237a1894566b9d27e7d776969c121a98b0b48c3", + "summary": "When processing an ECParameters structure OpenSSL enters an infinite loop if the curve specified is over a specially malformed binary polynomial field. This can be used to perform denial of service against any system which processes public keys, certificate requests or certificates. This includes TLS clients and TLS servers with client authentication enabled.", "affected_packages": [ { "package": { @@ -5239,8 +5251,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8zg", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze|0.9.8zf" + "fixed_version": "0.9.8s", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" }, { "package": { @@ -5251,8 +5263,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0s", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r" + "fixed_version": "1.0.0e", + "affected_version_range": 
"vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d" }, { "package": { @@ -5281,9 +5293,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1790", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1788", "severities": [], - "reference_id": "CVE-2015-1790", + "reference_id": "CVE-2015-1788", "reference_type": "" }, { @@ -5303,8 +5315,8 @@ "weaknesses": [] }, { - "unique_content_id": "303206c390cb78e168c8425d3c6d2c91", - "summary": "When verifying a signedData message the CMS code can enter an infinite loop if presented with an unknown hash function OID. This can be used to perform denial of service against any system which verifies signedData messages using the CMS code.", + "unique_content_id": "c6cf5f33fdcc803e66a88537cf41831c6b88ce19c6c320843d5c59c63c148c83", + "summary": "This vulnerability does not affect current versions of OpenSSL. It existed in previous OpenSSL versions and was fixed in June 2014. If a DTLS peer receives application data between the ChangeCipherSpec and Finished messages, buffering of such data may cause an invalid free, resulting in a segmentation fault or potentially, memory corruption.", "affected_packages": [ { "package": { @@ -5315,20 +5327,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8zg", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze|0.9.8zf" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.0s", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r" + "fixed_version": "0.9.8za", + "affected_version_range": 
"vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y" }, { "package": { @@ -5339,8 +5339,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1n", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m" + "fixed_version": "1.0.0m", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l" }, { "package": { @@ -5351,15 +5351,15 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2b", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a" + "fixed_version": "1.0.1h", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1792", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2014-8176", "severities": [], - "reference_id": "CVE-2015-1792", + "reference_id": "CVE-2014-8176", "reference_type": "" }, { @@ -5379,8 +5379,8 @@ "weaknesses": [] }, { - "unique_content_id": "742341fd7596524c221d7ac8aa8025de", - "summary": "When processing an ECParameters structure OpenSSL enters an infinite loop if the curve specified is over a specially malformed binary polynomial field. This can be used to perform denial of service against any system which processes public keys, certificate requests or certificates. This includes TLS clients and TLS servers with client authentication enabled.", + "unique_content_id": "fbbe723124334c66dbc53652a1a157264900e602a74ed731a2223c212d189f15", + "summary": "When verifying a signedData message the CMS code can enter an infinite loop if presented with an unknown hash function OID. 
This can be used to perform denial of service against any system which verifies signedData messages using the CMS code.", "affected_packages": [ { "package": { @@ -5391,8 +5391,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8s", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r" + "fixed_version": "0.9.8zg", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze|0.9.8zf" }, { "package": { @@ -5403,8 +5403,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0e", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d" + "fixed_version": "1.0.0s", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r" }, { "package": { @@ -5433,9 +5433,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1788", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-1792", "severities": [], - "reference_id": "CVE-2015-1788", + "reference_id": "CVE-2015-1792", "reference_type": "" }, { @@ -5455,7 +5455,7 @@ "weaknesses": [] }, { - "unique_content_id": "78795bf94381c0a1772ed444fb576c91", + "unique_content_id": "77f031f81329fda29782191d97de2003d3b4fadda5cae0ddf20bcd4ba0958c6e", "summary": "An error in the implementation of the alternative certificate chain logic could allow an attacker to cause certain checks on untrusted certificates to be bypassed, such as the CA flag, enabling them to use a valid leaf certificate to act as a CA and \"issue\" an invalid certificate.", "affected_packages": [ { @@ -5507,7 +5507,7 @@ "weaknesses": [] }, { - "unique_content_id": 
"34e7fc0f12a532fb0e3f133767651b82", + "unique_content_id": "6b2da461b684884127216718edee478e331e8b64439b5c98f36f9284ead68922", "summary": "If a client receives a ServerKeyExchange for an anonymous DH ciphersuite with the value of p set to 0 then a seg fault can occur leading to a possible denial of service attack.", "affected_packages": [ { @@ -5547,8 +5547,8 @@ "weaknesses": [] }, { - "unique_content_id": "3c8cc92c8be75ecbbf22aa5caa33bfa9", - "summary": "The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and absent mask generation function parameter. Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", + "unique_content_id": "434c0477ef7b438f9b58ddb4cf5d072f24523f7231cf77fb3d492dc0ae358d03", + "summary": "If PSK identity hints are received by a multi-threaded client then the values are wrongly updated in the parent SSL_CTX structure. 
This can result in a race condition potentially leading to a double free of the identify hint data.", "affected_packages": [ { "package": { @@ -5559,8 +5559,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1q", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p" + "fixed_version": "1.0.0t", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0h|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r|1.0.0s" }, { "package": { @@ -5571,37 +5571,9 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2e", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3194", - "severities": [], - "reference_id": "CVE-2015-3194", - "reference_type": "" + "fixed_version": "1.0.1p", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o" }, - { - "url": "https://www.openssl.org/news/secadv/20151203.txt", - "severities": [ - { - "value": "Moderate", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2015-12-03T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "58623263c1b67d72553e0282afd5d03a", - "summary": "If PSK identity hints are received by a multi-threaded client then the values are wrongly updated in the parent SSL_CTX structure. 
This can result in a race condition potentially leading to a double free of the identify hint data.", - "affected_packages": [ { "package": { "name": "openssl", @@ -5611,21 +5583,37 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0t", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0h|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q|1.0.0r|1.0.0s" - }, + "fixed_version": "1.0.2d", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c" + } + ], + "references": [ { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.1p", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o" + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3196", + "severities": [], + "reference_id": "CVE-2015-3196", + "reference_type": "" }, + { + "url": "https://www.openssl.org/news/secadv/20151203.txt", + "severities": [ + { + "value": "Low", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2015-12-03T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "7c4c38c81c872cfcb7ae77bc45b1a78760ddda5aa1ebf6e061d41443c7a0870a", + "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. 
An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites.", + "affected_packages": [ { "package": { "name": "openssl", @@ -5635,22 +5623,22 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2d", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c" + "fixed_version": "1.0.2e", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3196", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3193", "severities": [], - "reference_id": "CVE-2015-3196", + "reference_id": "CVE-2015-3193", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20151203.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -5663,7 +5651,7 @@ "weaknesses": [] }, { - "unique_content_id": "aa54b531fb7b90075a099e3d74098089", + "unique_content_id": "7eedb5c223cb23e47aa9ce69cf53869fc975e43e734731737790f5355c57c46f", "summary": "When presented with a malformed X509_ATTRIBUTE structure OpenSSL will leak memory. This structure is used by the PKCS#7 and CMS routines so any application which reads PKCS#7 or CMS data from untrusted sources is affected. SSL/TLS is not affected.", "affected_packages": [ { @@ -5739,48 +5727,8 @@ "weaknesses": [] }, { - "unique_content_id": "c49999301ee8aa01a9ddd428979f0bc4", - "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. 
Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites.", - "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.2e", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3193", - "severities": [], - "reference_id": "CVE-2015-3193", - "reference_type": "" - }, - { - "url": "https://www.openssl.org/news/secadv/20151203.txt", - "severities": [ - { - "value": "Moderate", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2015-12-03T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "a1b7aec7c53c8018f9f0fc9118de71b4", - "summary": "A malicious client can negotiate SSLv2 ciphers that have been disabled on the server and complete SSLv2 handshakes even if all SSLv2 ciphers have been disabled, provided that the SSLv2 protocol was not also disabled via SSL_OP_NO_SSLv2.", + "unique_content_id": "826e677a591d0d5e808454bc70f127fab4629f8ee5c2f16bc03c03740fc52661", + "summary": "The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and absent mask generation function parameter. 
Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", "affected_packages": [ { "package": { @@ -5791,8 +5739,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1r", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q" + "fixed_version": "1.0.1q", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p" }, { "package": { @@ -5803,22 +5751,22 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2f", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e" + "fixed_version": "1.0.2e", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3197", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3194", "severities": [], - "reference_id": "CVE-2015-3197", + "reference_id": "CVE-2015-3194", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20160128.txt", + "url": "https://www.openssl.org/news/secadv/20151203.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -5827,11 +5775,11 @@ "reference_type": "" } ], - "date_published": "2016-01-28T00:00:00+00:00", + "date_published": "2015-12-03T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "bb0ba32b691bb5c4273824bad2f457a9", + "unique_content_id": "202a2aec8d017aab9c615cfdaf94d9a7137c18c8e41c2d999025759310199b81", "summary": "Historically OpenSSL usually only ever generated DH parameters based on \"safe\" 
primes. More recently (in version 1.0.2) support was provided for generating X9.42 style parameter files such as those required for RFC 5114 support. The primes used in such files may not be \"safe\". Where an application is using DH configured with parameters based on primes that are not \"safe\" then an attacker could use this fact to find a peer's private DH exponent. This attack requires that the attacker complete multiple handshakes in which the peer uses the same private DH exponent. For example this could be used to discover a TLS server's private DH exponent if it's reusing the private DH exponent or it's using a static DH ciphersuite. OpenSSL provides the option SSL_OP_SINGLE_DH_USE for ephemeral DH (DHE) in TLS. It is not on by default. If the option is not set then the server reuses the same private DH exponent for the life of the server process and would be vulnerable to this attack. It is believed that many popular applications do set this option and would therefore not be at risk. OpenSSL before 1.0.2f will reuse the key if: - SSL_CTX_set_tmp_dh()/SSL_set_tmp_dh() is used and SSL_OP_SINGLE_DH_USE is not set. - SSL_CTX_set_tmp_dh_callback()/SSL_set_tmp_dh_callback() is used, and both the parameters and the key are set and SSL_OP_SINGLE_DH_USE is not used. This is an undocumted feature and parameter files don't contain the key. - Static DH ciphersuites are used. The key is part of the certificate and so it will always reuse it. This is only supported in 1.0.2. It will not reuse the key for DHE ciphers suites if: - SSL_OP_SINGLE_DH_USE is set - SSL_CTX_set_tmp_dh_callback()/SSL_set_tmp_dh_callback() is used and the callback does not provide the key, only the parameters. The callback is almost always used like this. Non-safe primes are generated by OpenSSL when using: - genpkey with the dh_rfc5114 option. This will write an X9.42 style file including the prime-order subgroup size \"q\". This is supported since the 1.0.2 version. 
Older versions can't read files generated in this way. - dhparam with the -dsaparam option. This has always been documented as requiring the single use. The fix for this issue adds an additional check where a \"q\" parameter is available (as is the case in X9.42 based parameters). This detects the only known attack, and is the only possible defense for static DH ciphersuites. This could have some performance impact. Additionally the SSL_OP_SINGLE_DH_USE option has been switched on by default and cannot be disabled. This could have some performance impact.", "affected_packages": [ { @@ -5871,8 +5819,8 @@ "weaknesses": [] }, { - "unique_content_id": "1e32ac05e706f05b60d0c367814faf5b", - "summary": "A double free bug was discovered when OpenSSL parses malformed DSA private keys and could lead to a DoS attack or memory corruption for applications that receive DSA private keys from untrusted sources. This scenario is considered rare.", + "unique_content_id": "582bb190d8800ea86907f44769c22a29e8f34079c0b2ce5e09052db99707480b", + "summary": "A malicious client can negotiate SSLv2 ciphers that have been disabled on the server and complete SSLv2 handshakes even if all SSLv2 ciphers have been disabled, provided that the SSLv2 protocol was not also disabled via SSL_OP_NO_SSLv2.", "affected_packages": [ { "package": { @@ -5883,8 +5831,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1s", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r" + "fixed_version": "1.0.1r", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q" }, { "package": { @@ -5895,19 +5843,19 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2g", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f" + 
"fixed_version": "1.0.2f", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0705", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-3197", "severities": [], - "reference_id": "CVE-2016-0705", + "reference_id": "CVE-2015-3197", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20160301.txt", + "url": "https://www.openssl.org/news/secadv/20160128.txt", "severities": [ { "value": "Low", @@ -5919,11 +5867,11 @@ "reference_type": "" } ], - "date_published": "2016-03-01T00:00:00+00:00", + "date_published": "2016-01-28T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "356419ba58928dd92651de3bd8726759", + "unique_content_id": "0dc285b8adde395581c94e422ef09ae80752d8b5b7e8177bee2bd05a9044f07c", "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address vulnerability CVE-2015-0293. s2_srvr.c did not enforce that clear-key-length is 0 for non-export ciphers. If clear-key bytes are present for these ciphers, they *displace* encrypted-key bytes. This leads to an efficient divide-and-conquer key recovery attack: if an eavesdropper has intercepted an SSLv2 handshake, they can use the server as an oracle to determine the SSLv2 master-key, using only 16 connections to the server and negligible computation. 
More importantly, this leads to a more efficient version of DROWN that is effective against non-export ciphersuites, and requires no significant computation.", "affected_packages": [ { @@ -5999,7 +5947,7 @@ "weaknesses": [] }, { - "unique_content_id": "4f983dc0849c0739895c99ff8042ef0f", + "unique_content_id": "3bb968a563522f059f423c3561dfed5d17a6d5c4d6bd3d1715133146dcc94142", "summary": "A cross-protocol attack was discovered that could lead to decryption of TLS sessions by using a server supporting SSLv2 and EXPORT cipher suites as a Bleichenbacher RSA padding oracle. Note that traffic between clients and non-vulnerable servers can be decrypted provided another server supporting SSLv2 and EXPORT ciphers (even with a different protocol such as SMTP, IMAP or POP) shares the RSA keys of the non-vulnerable server. This vulnerability is known as DROWN (CVE-2016-0800). Recovering one session key requires the attacker to perform approximately 2^50 computation, as well as thousands of connections to the affected server. A more efficient variant of the DROWN attack exists against unpatched OpenSSL servers using versions that predate 1.0.2a, 1.0.1m, 1.0.0r and 0.9.8zf released on 19/Mar/2015 (see CVE-2016-0703 below). Users can avoid this issue by disabling the SSLv2 protocol in all their SSL/TLS servers, if they've not done so already. Disabling all SSLv2 ciphers is also sufficient, provided the patches for CVE-2015-3197 (fixed in OpenSSL 1.0.1r and 1.0.2f) have been deployed. Servers that have not disabled the SSLv2 protocol, and are not patched for CVE-2015-3197 are vulnerable to DROWN even if all SSLv2 ciphers are nominally disabled, because malicious clients can force the use of SSLv2 with EXPORT ciphers. OpenSSL 1.0.2g and 1.0.1s deploy the following mitigation against DROWN: SSLv2 is now by default disabled at build-time. Builds that are not configured with \"enable-ssl2\" will not support SSLv2. 
Even if \"enable-ssl2\" is used, users who want to negotiate SSLv2 via the version-flexible SSLv23_method() will need to explicitly call either of: SSL_CTX_clear_options(ctx, SSL_OP_NO_SSLv2); or SSL_clear_options(ssl, SSL_OP_NO_SSLv2); as appropriate. Even if either of those is used, or the application explicitly uses the version-specific SSLv2_method() or its client or server variants, SSLv2 ciphers vulnerable to exhaustive search key recovery have been removed. Specifically, the SSLv2 40-bit EXPORT ciphers, and SSLv2 56-bit DES are no longer available. In addition, weak ciphers in SSLv3 and up are now disabled in default builds of OpenSSL. Builds that are not configured with \"enable-weak-ssl-ciphers\" will not provide any \"EXPORT\" or \"LOW\" strength ciphers.", "affected_packages": [ { @@ -6051,8 +5999,8 @@ "weaknesses": [] }, { - "unique_content_id": "5115d9fca6da89c0f09b18c66063043e", - "summary": "A side-channel attack was found which makes use of cache-bank conflicts on the Intel Sandy-Bridge microarchitecture which could lead to the recovery of RSA keys. The ability to exploit this issue is limited as it relies on an attacker who has control of code in a thread running on the same hyper-threaded core as the victim thread which is performing decryptions.", + "unique_content_id": "54d1b0ccbb4b663c9a43e3d2a6be131b6b5a0413fbb5f22cee822ed6936d94fe", + "summary": "In the BN_hex2bn function the number of hex digits is calculated using an int value |i|. Later |bn_expand| is called with a value of |i * 4|. For large values of |i| this can result in |bn_expand| not allocating any memory because |i * 4| is negative. This can leave the internal BIGNUM data field as NULL leading to a subsequent NULL ptr deref. For very large values of |i|, the calculation |i * 4| could be a positive value smaller than |i|. In this case memory is allocated to the internal BIGNUM data field, but it is insufficiently sized leading to heap corruption. A similar issue exists in BN_dec2bn. 
This could have security consequences if BN_hex2bn/BN_dec2bn is ever called by user applications with very large untrusted hex/dec data. This is anticipated to be a rare occurrence. All OpenSSL internal usage of these functions use data that is not expected to be untrusted, e.g. config file data or application command line arguments. If user developed applications generate config file data based on untrusted data then it is possible that this could also lead to security consequences. This is also anticipated to be rare.", "affected_packages": [ { "package": { @@ -6081,9 +6029,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0702", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0797", "severities": [], - "reference_id": "CVE-2016-0702", + "reference_id": "CVE-2016-0797", "reference_type": "" }, { @@ -6103,7 +6051,7 @@ "weaknesses": [] }, { - "unique_content_id": "56718964514021ad2571d5e9bb4e1ba9", + "unique_content_id": "5cee408201ad50518a04c7597ae547e01069a3e3a71411bb5d03665d395c9c3f", "summary": "The SRP user database lookup method SRP_VBASE_get_by_user had confusing memory management semantics; the returned pointer was sometimes newly allocated, and sometimes owned by the callee. The calling code has no way of distinguishing these two cases. Specifically, SRP servers that configure a secret seed to hide valid login information are vulnerable to a memory leak: an attacker connecting with an invalid username can cause a memory leak of around 300 bytes per connection. Servers that do not configure SRP, or configure SRP but do not configure a seed are not vulnerable. In Apache, the seed directive is known as SSLSRPUnknownUserSeed. To mitigate the memory leak, the seed handling in SRP_VBASE_get_by_user is now disabled even if the user has configured a seed. Applications are advised to migrate to SRP_VBASE_get1_by_user. However, note that OpenSSL makes no strong guarantees about the indistinguishability of valid and invalid logins. 
In particular, computations are currently not carried out in constant time.", "affected_packages": [ { @@ -6155,7 +6103,7 @@ "weaknesses": [] }, { - "unique_content_id": "65ffc54cdd6e37ee324ff207835500d6", + "unique_content_id": "84fcbdaee2028d10d0a154f4562e0212135d6cce3bfd9eda6b933c8e302f6351", "summary": "The internal |fmtstr| function used in processing a \"%s\" format string in the BIO_*printf functions could overflow while calculating the length of a string and cause an OOB read when printing very long strings. Additionally the internal |doapr_outch| function can attempt to write to an OOB memory location (at an offset from the NULL pointer) in the event of a memory allocation failure. In 1.0.2 and below this could be caused where the size of a buffer to be allocated is greater than INT_MAX. E.g. this could be in processing a very long \"%s\" format string. Memory leaks can also occur. The first issue may mask the second issue dependent on compiler behaviour. These problems could enable attacks where large amounts of untrusted data is passed to the BIO_*printf functions. If applications use these functions in this way then they could be vulnerable. OpenSSL itself uses these functions when printing out human-readable dumps of ASN.1 data. Therefore applications that print this data could be vulnerable if the data is from untrusted sources. OpenSSL command line applications could also be vulnerable where they print out ASN.1 data, or if untrusted data is passed as command line arguments. Libssl is not considered directly vulnerable. 
Additionally certificates etc received via remote connections via libssl are also unlikely to be able to trigger these issues because of message size limits enforced within libssl.", "affected_packages": [ { @@ -6207,8 +6155,8 @@ "weaknesses": [] }, { - "unique_content_id": "ca04670f15a036f2d20611d996b2e03d", - "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address the vulnerability CVE-2015-0293. s2_srvr.c overwrite the wrong bytes in the master-key when applying Bleichenbacher protection for export cipher suites. This provides a Bleichenbacher oracle, and could potentially allow more efficient variants of the DROWN attack.", + "unique_content_id": "8f5d81b6201854025eba1228dc3dbb1562bdefdd101afb131581d6c49722d872", + "summary": "A double free bug was discovered when OpenSSL parses malformed DSA private keys and could lead to a DoS attack or memory corruption for applications that receive DSA private keys from untrusted sources. 
This scenario is considered rare.", "affected_packages": [ { "package": { @@ -6219,8 +6167,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "0.9.8zf", - "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" + "fixed_version": "1.0.1s", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r" }, { "package": { @@ -6231,9 +6179,37 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.0r", - "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" + "fixed_version": "1.0.2g", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0705", + "severities": [], + "reference_id": "CVE-2016-0705", + "reference_type": "" }, + { + "url": "https://www.openssl.org/news/secadv/20160301.txt", + "severities": [ + { + "value": "Low", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2016-03-01T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "c2f87f5ea625ae3e87ab3a3ec82e47995b16601835ef7be500414932928f3c69", + "summary": "A side-channel attack was found which makes use of cache-bank conflicts on the Intel Sandy-Bridge microarchitecture which could lead to the recovery of RSA keys. 
The ability to exploit this issue is limited as it relies on an attacker who has control of code in a thread running on the same hyper-threaded core as the victim thread which is performing decryptions.", + "affected_packages": [ { "package": { "name": "openssl", @@ -6243,8 +6219,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1m", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" + "fixed_version": "1.0.1s", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r" }, { "package": { @@ -6255,22 +6231,22 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2a", - "affected_version_range": "vers:openssl/1.0.2" + "fixed_version": "1.0.2g", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0704", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0702", "severities": [], - "reference_id": "CVE-2016-0704", + "reference_id": "CVE-2016-0702", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160301.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -6283,8 +6259,8 @@ "weaknesses": [] }, { - "unique_content_id": "dcfad5e453c456b47b7dcb85f3bbf948", - "summary": "In the BN_hex2bn function the number of hex digits is calculated using an int value |i|. Later |bn_expand| is called with a value of |i * 4|. For large values of |i| this can result in |bn_expand| not allocating any memory because |i * 4| is negative. This can leave the internal BIGNUM data field as NULL leading to a subsequent NULL ptr deref. For very large values of |i|, the calculation |i * 4| could be a positive value smaller than |i|. 
In this case memory is allocated to the internal BIGNUM data field, but it is insufficiently sized leading to heap corruption. A similar issue exists in BN_dec2bn. This could have security consequences if BN_hex2bn/BN_dec2bn is ever called by user applications with very large untrusted hex/dec data. This is anticipated to be a rare occurrence. All OpenSSL internal usage of these functions use data that is not expected to be untrusted, e.g. config file data or application command line arguments. If user developed applications generate config file data based on untrusted data then it is possible that this could also lead to security consequences. This is also anticipated to be rare.", + "unique_content_id": "e7bba3f95fb4b39e7b5f6a6297935e8cfcadbbabda552ee1b06e65e9282ab672", + "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address the vulnerability CVE-2015-0293. s2_srvr.c overwrite the wrong bytes in the master-key when applying Bleichenbacher protection for export cipher suites. 
This provides a Bleichenbacher oracle, and could potentially allow more efficient variants of the DROWN attack.", "affected_packages": [ { "package": { @@ -6295,8 +6271,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1s", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r" + "fixed_version": "0.9.8zf", + "affected_version_range": "vers:openssl/0.9.8|0.9.8a|0.9.8b|0.9.8c|0.9.8d|0.9.8e|0.9.8f|0.9.8g|0.9.8h|0.9.8i|0.9.8j|0.9.8k|0.9.8l|0.9.8m|0.9.8n|0.9.8o|0.9.8p|0.9.8q|0.9.8r|0.9.8s|0.9.8t|0.9.8u|0.9.8v|0.9.8w|0.9.8x|0.9.8y|0.9.8za|0.9.8zb|0.9.8zc|0.9.8zd|0.9.8ze" }, { "package": { @@ -6307,22 +6283,46 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2g", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f" + "fixed_version": "1.0.0r", + "affected_version_range": "vers:openssl/1.0.0|1.0.0a|1.0.0b|1.0.0c|1.0.0d|1.0.0e|1.0.0f|1.0.0g|1.0.0i|1.0.0j|1.0.0k|1.0.0l|1.0.0m|1.0.0n|1.0.0o|1.0.0p|1.0.0q" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.1m", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2a", + "affected_version_range": "vers:openssl/1.0.2" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0797", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-0704", "severities": [], - "reference_id": "CVE-2016-0797", + "reference_id": "CVE-2016-0704", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160301.txt", "severities": [ { - "value": "Low", + "value": "Moderate", 
"system": "generic_textual", "scoring_elements": "" } @@ -6335,8 +6335,8 @@ "weaknesses": [] }, { - "unique_content_id": "0d33c0311add27a6e1a49d7a3d965c38", - "summary": "When ASN.1 data is read from a BIO using functions such as d2i_CMS_bio() a short invalid encoding can casuse allocation of large amounts of memory potentially consuming excessive resources or exhausting memory. Any application parsing untrusted data through d2i BIO functions is affected. The memory based functions such as d2i_X509() are *not* affected. Since the memory based functions are used by the TLS library, TLS applications are not affected.", + "unique_content_id": "2455be4d3319416de5807835a5e13cb7a40a862fcb21503efe33c9b0836132bc", + "summary": "This issue affected versions of OpenSSL prior to April 2015. The bug causing the vulnerability was fixed on April 18th 2015, and released as part of the June 11th 2015 security releases. The security impact of the bug was not known at the time. In previous versions of OpenSSL, ASN.1 encoding the value zero represented as a negative integer can cause a buffer underflow with an out-of-bounds write in i2c_ASN1_INTEGER. The ASN.1 parser does not normally create \"negative zeroes\" when parsing ASN.1 input, and therefore, an attacker cannot trigger this bug. However, a second, independent bug revealed that the ASN.1 parser (specifically, d2i_ASN1_TYPE) can misinterpret a large universal tag as a negative zero value. Large universal tags are not present in any common ASN.1 structures (such as X509) but are accepted as part of ANY structures. Therefore, if an application deserializes untrusted ASN.1 structures containing an ANY field, and later reserializes them, an attacker may be able to trigger an out-of-bounds write. This has been shown to cause memory corruption that is potentially exploitable with some malloc implementations. Applications that parse and re-encode X509 certificates are known to be vulnerable. 
Applications that verify RSA signatures on X509 certificates may also be vulnerable; however, only certificates with valid signatures trigger ASN.1 re-encoding and hence the bug. Specifically, since OpenSSL's default TLS X509 chain verification code verifies the certificate chain from root to leaf, TLS handshakes could only be targeted with valid certificates issued by trusted Certification Authorities.", "affected_packages": [ { "package": { @@ -6347,8 +6347,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1t", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r|1.0.1s" + "fixed_version": "1.0.1o", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n" }, { "package": { @@ -6359,22 +6359,22 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2h", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g" + "fixed_version": "1.0.2c", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2109", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2108", "severities": [], - "reference_id": "CVE-2016-2109", + "reference_id": "CVE-2016-2108", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160503.txt", "severities": [ { - "value": "Low", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -6387,8 +6387,8 @@ "weaknesses": [] }, { - "unique_content_id": "6646efbc2c3440a5aaedd5479df16fe0", - "summary": "An overflow can occur in the EVP_EncryptUpdate() function. If an attacker is able to supply very large amounts of input data after a previous call to EVP_EncryptUpdate() with a partial block then a length check can overflow resulting in a heap corruption. 
Following an analysis of all OpenSSL internal usage of the EVP_EncryptUpdate() function all usage is one of two forms. The first form is where the EVP_EncryptUpdate() call is known to be the first called function after an EVP_EncryptInit(), and therefore that specific call must be safe. The second form is where the length passed to EVP_EncryptUpdate() can be seen from the code to be some small value and therefore there is no possibility of an overflow. Since all instances are one of these two forms, it is believed that there can be no overflows in internal code due to this problem. It should be noted that EVP_DecryptUpdate() can call EVP_EncryptUpdate() in certain code paths. Also EVP_CipherUpdate() is a synonym for EVP_EncryptUpdate(). All instances of these calls have also been analysed too and it is believed there are no instances in internal usage where an overflow could occur. This could still represent a security issue for end user code that calls this function directly.", + "unique_content_id": "3eadfec35b5b88ba68ecb0d97d2cba4203556ca8be6c28566ff28d045dbeeaba", + "summary": "An overflow can occur in the EVP_EncodeUpdate() function which is used for Base64 encoding of binary data. If an attacker is able to supply very large amounts of input data then a length check can overflow resulting in a heap corruption. Internally to OpenSSL the EVP_EncodeUpdate() function is primarly used by the PEM_write_bio* family of functions. These are mainly used within the OpenSSL command line applications. These internal uses are not considered vulnerable because all calls are bounded with length checks so no overflow is possible. User applications that call these APIs directly with large amounts of untrusted data may be vulnerable. (Note: Initial analysis suggested that the PEM_write_bio* were vulnerable, and this is reflected in the patch commit message. 
This is no longer believed to be the case).", "affected_packages": [ { "package": { @@ -6417,9 +6417,9 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2106", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2105", "severities": [], - "reference_id": "CVE-2016-2106", + "reference_id": "CVE-2016-2105", "reference_type": "" }, { @@ -6439,8 +6439,8 @@ "weaknesses": [] }, { - "unique_content_id": "80621d002083a0f1c1d9267b2575c2af", - "summary": "An overflow can occur in the EVP_EncodeUpdate() function which is used for Base64 encoding of binary data. If an attacker is able to supply very large amounts of input data then a length check can overflow resulting in a heap corruption. Internally to OpenSSL the EVP_EncodeUpdate() function is primarly used by the PEM_write_bio* family of functions. These are mainly used within the OpenSSL command line applications. These internal uses are not considered vulnerable because all calls are bounded with length checks so no overflow is possible. User applications that call these APIs directly with large amounts of untrusted data may be vulnerable. (Note: Initial analysis suggested that the PEM_write_bio* were vulnerable, and this is reflected in the patch commit message. This is no longer believed to be the case).", + "unique_content_id": "45c33cd5992b2f757ade809ec1b55e35aed7fa0d57bb8b46c8f7ab46d4cf5d81", + "summary": "A MITM attacker can use a padding oracle attack to decrypt traffic when the connection uses an AES CBC cipher and the server support AES-NI. This issue was introduced as part of the fix for Lucky 13 padding attack (CVE-2013-0169). The padding check was rewritten to be in constant time by making sure that always the same bytes are read and compared against either the MAC or padding bytes. 
But it no longer checked that there was enough data to have both the MAC and padding bytes.", "affected_packages": [ { "package": { @@ -6469,16 +6469,22 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2105", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2107", "severities": [], - "reference_id": "CVE-2016-2105", + "reference_id": "CVE-2016-2107", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/68595c0c2886e7942a14f98c17a55a88afb6c292", + "severities": [], + "reference_id": "", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160503.txt", "severities": [ { - "value": "Low", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -6491,7 +6497,7 @@ "weaknesses": [] }, { - "unique_content_id": "9448f7ccc33194fa36bbdb2f40e749b2", + "unique_content_id": "79d98ea5b970167fc32b2dd513af82c8f21fc88f5863ff80c40bf92a86567dc8", "summary": "ASN1 Strings that are over 1024 bytes can cause an overread in applications using the X509_NAME_oneline() function on EBCDIC systems. This could result in arbitrary stack data being returned in the buffer.", "affected_packages": [ { @@ -6543,8 +6549,8 @@ "weaknesses": [] }, { - "unique_content_id": "eaa2fce419eaf5b4ea668e9106c1fd43", - "summary": "A MITM attacker can use a padding oracle attack to decrypt traffic when the connection uses an AES CBC cipher and the server support AES-NI. This issue was introduced as part of the fix for Lucky 13 padding attack (CVE-2013-0169). The padding check was rewritten to be in constant time by making sure that always the same bytes are read and compared against either the MAC or padding bytes. 
But it no longer checked that there was enough data to have both the MAC and padding bytes.", + "unique_content_id": "86acb94d7c04bbcbd8c25c43ae292bd04c94a03e34fdc267053638c248e0b7f3", + "summary": "When ASN.1 data is read from a BIO using functions such as d2i_CMS_bio() a short invalid encoding can casuse allocation of large amounts of memory potentially consuming excessive resources or exhausting memory. Any application parsing untrusted data through d2i BIO functions is affected. The memory based functions such as d2i_X509() are *not* affected. Since the memory based functions are used by the TLS library, TLS applications are not affected.", "affected_packages": [ { "package": { @@ -6573,22 +6579,16 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2107", - "severities": [], - "reference_id": "CVE-2016-2107", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/68595c0c2886e7942a14f98c17a55a88afb6c292", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2109", "severities": [], - "reference_id": "", + "reference_id": "CVE-2016-2109", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160503.txt", "severities": [ { - "value": "High", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -6601,8 +6601,8 @@ "weaknesses": [] }, { - "unique_content_id": "eadc3ef5343caffdb16fc7a845983d99", - "summary": "This issue affected versions of OpenSSL prior to April 2015. The bug causing the vulnerability was fixed on April 18th 2015, and released as part of the June 11th 2015 security releases. The security impact of the bug was not known at the time. In previous versions of OpenSSL, ASN.1 encoding the value zero represented as a negative integer can cause a buffer underflow with an out-of-bounds write in i2c_ASN1_INTEGER. The ASN.1 parser does not normally create \"negative zeroes\" when parsing ASN.1 input, and therefore, an attacker cannot trigger this bug. 
However, a second, independent bug revealed that the ASN.1 parser (specifically, d2i_ASN1_TYPE) can misinterpret a large universal tag as a negative zero value. Large universal tags are not present in any common ASN.1 structures (such as X509) but are accepted as part of ANY structures. Therefore, if an application deserializes untrusted ASN.1 structures containing an ANY field, and later reserializes them, an attacker may be able to trigger an out-of-bounds write. This has been shown to cause memory corruption that is potentially exploitable with some malloc implementations. Applications that parse and re-encode X509 certificates are known to be vulnerable. Applications that verify RSA signatures on X509 certificates may also be vulnerable; however, only certificates with valid signatures trigger ASN.1 re-encoding and hence the bug. Specifically, since OpenSSL's default TLS X509 chain verification code verifies the certificate chain from root to leaf, TLS handshakes could only be targeted with valid certificates issued by trusted Certification Authorities.", + "unique_content_id": "fb73586b842fb010ced45dc708d8346e3aded542fe78c11f03f83bf754997edd", + "summary": "An overflow can occur in the EVP_EncryptUpdate() function. If an attacker is able to supply very large amounts of input data after a previous call to EVP_EncryptUpdate() with a partial block then a length check can overflow resulting in a heap corruption. Following an analysis of all OpenSSL internal usage of the EVP_EncryptUpdate() function all usage is one of two forms. The first form is where the EVP_EncryptUpdate() call is known to be the first called function after an EVP_EncryptInit(), and therefore that specific call must be safe. The second form is where the length passed to EVP_EncryptUpdate() can be seen from the code to be some small value and therefore there is no possibility of an overflow. 
Since all instances are one of these two forms, it is believed that there can be no overflows in internal code due to this problem. It should be noted that EVP_DecryptUpdate() can call EVP_EncryptUpdate() in certain code paths. Also EVP_CipherUpdate() is a synonym for EVP_EncryptUpdate(). All instances of these calls have also been analysed too and it is believed there are no instances in internal usage where an overflow could occur. This could still represent a security issue for end user code that calls this function directly.", "affected_packages": [ { "package": { @@ -6613,8 +6613,8 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.1o", - "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n" + "fixed_version": "1.0.1t", + "affected_version_range": "vers:openssl/1.0.1|1.0.1a|1.0.1b|1.0.1c|1.0.1d|1.0.1e|1.0.1f|1.0.1g|1.0.1h|1.0.1i|1.0.1j|1.0.1k|1.0.1l|1.0.1m|1.0.1n|1.0.1o|1.0.1p|1.0.1q|1.0.1r|1.0.1s" }, { "package": { @@ -6625,22 +6625,22 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.0.2c", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b" + "fixed_version": "1.0.2h", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2108", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-2106", "severities": [], - "reference_id": "CVE-2016-2108", + "reference_id": "CVE-2016-2106", "reference_type": "" }, { "url": "https://www.openssl.org/news/secadv/20160503.txt", "severities": [ { - "value": "High", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -6653,7 +6653,7 @@ "weaknesses": [] }, { - "unique_content_id": "4c10365eacf49048d2ca1f3d490de4c2", + "unique_content_id": "7cf78e4965685dd994d47b5b4648c8671c19b75994e4b56ad143255738c4b716", "summary": "Avoid some undefined pointer arithmetic A common 
idiom in the codebase is to check limits in the following manner: \"p + len > limit\" Where \"p\" points to some malloc'd data of SIZE bytes and limit == p + SIZE \"len\" here could be from some externally supplied data (e.g. from a TLS message). The rules of C pointer arithmetic are such that \"p + len\" is only well defined where len <= SIZE. Therefore the above idiom is actually undefined behaviour. For example this could cause problems if some malloc implementation provides an address for \"p\" such that \"p + len\" actually overflows for values of len that are too big and therefore p + len < limit.", "affected_packages": [ { @@ -6705,7 +6705,7 @@ "weaknesses": [] }, { - "unique_content_id": "69c98b0d04f2bf1a2d1f044b54108625", + "unique_content_id": "41d1b686cc25b51e538b1294c03f9bd49194604c0a5b1878a85ef935c82f0573", "summary": "Operations in the DSA signing algorithm should run in constant time in order to avoid side channel attacks. A flaw in the OpenSSL DSA implementation means that a non-constant time codepath is followed for certain operations. This has been demonstrated through a cache-timing attack to be sufficient for an attacker to recover the private DSA key.", "affected_packages": [ { @@ -6757,7 +6757,7 @@ "weaknesses": [] }, { - "unique_content_id": "c3ef560f8d241b1b75cdef3199faa45c", + "unique_content_id": "39ea5c947d194650d344e5adcd4353a31075fe76556175678092991fef56935a", "summary": "The function TS_OBJ_print_bio() misuses OBJ_obj2txt(): the return value is the total length the OID text representation would use and not the amount of data written. This will result in OOB reads when large OIDs are presented.", "affected_packages": [ { @@ -6809,7 +6809,7 @@ "weaknesses": [] }, { - "unique_content_id": "e29c5c80d781403086304ecb4fce7a59", + "unique_content_id": "fcd1d51451689926072528e12c9206f1c13c61ed97e42c132b0667ea48870171", "summary": "The function BN_bn2dec() does not check the return value of BN_div_word(). 
This can cause an OOB write if an application uses this function with an overly large BIGNUM. This could be a problem if an overly large certificate or CRL is printed out from an untrusted source. TLS is not affected because record limits will reject an oversized certificate before it is parsed.", "affected_packages": [ { @@ -6861,7 +6861,7 @@ "weaknesses": [] }, { - "unique_content_id": "bad085048774b51abab2b4e37c3868a0", + "unique_content_id": "2dcc23a57bd50008fd1ff1dd5ab1e15ac70d58a3b621d70b039cee4339be5439", "summary": "A flaw in the DTLS replay attack protection mechanism means that records that arrive for future epochs update the replay protection \"window\" before the MAC for the record has been validated. This could be exploited by an attacker by sending a record for the next epoch (which does not have to decrypt or have a valid MAC), with a very large sequence number. This means that all subsequent legitimate packets are dropped causing a denial of service for a specific DTLS connection.", "affected_packages": [ { @@ -6925,7 +6925,7 @@ "weaknesses": [] }, { - "unique_content_id": "c541cb508cce45e8ffa33b03c44a7706", + "unique_content_id": "24d135d43dac5961bd8e824a6be06bf737548a27b6908a9bdb06c4cf6be7da66", "summary": "In a DTLS connection where handshake messages are delivered out-of-order those messages that OpenSSL is not yet ready to process will be buffered for later use. Under certain circumstances, a flaw in the logic means that those messages do not get removed from the buffer even though the handshake has been completed. An attacker could force up to approx. 15 messages to remain in the buffer when they are no longer required. These messages will be cleared when the DTLS connection is closed. The default maximum size for a message is 100k. Therefore the attacker could force an additional 1500k to be consumed per connection. 
By opening many simulataneous connections an attacker could cause a DoS attack through memory exhaustion.", "affected_packages": [ { @@ -6989,7 +6989,7 @@ "weaknesses": [] }, { - "unique_content_id": "cfaace2e186847527636a2195766fc52", + "unique_content_id": "e43493ec8a73bb371bf163314718c77edbe7d72190cd2e88e09e3a65d4500cdb", "summary": "If a server uses SHA512 for TLS session ticket HMAC it is vulnerable to a DoS attack where a malformed ticket will result in an OOB read which will ultimately crash. The use of SHA512 in TLS session tickets is comparatively rare as it requires a custom server callback and ticket lookup mechanism.", "affected_packages": [ { @@ -7053,7 +7053,7 @@ "weaknesses": [] }, { - "unique_content_id": "2af63a761bf4ddbbaeb92afa382151cf", + "unique_content_id": "c6b031581915c5cc5b42df4000da62b01be62afbba15c264e9c189aac336f855", "summary": "An overflow can occur in MDC2_Update() either if called directly or through the EVP_DigestUpdate() function using MDC2. If an attacker is able to supply very large amounts of input data after a previous call to EVP_EncryptUpdate() with a partial block then a length check can overflow resulting in a heap corruption. The amount of data needed is comparable to SIZE_MAX which is impractical on most platforms.", "affected_packages": [ { @@ -7117,7 +7117,7 @@ "weaknesses": [] }, { - "unique_content_id": "659c848c83841e30d1052e8d49e18051", + "unique_content_id": "d87e634ab174d154043776ba4b3c6659d5f37175726b216710c42ec5144d3d95", "summary": "Because DES (and triple-DES) has only a 64-bit block size, birthday attacks are a real concern. For example, with the ability to run Javascript in a browser, it is possible to send enough traffic to cause a collision, and then use that information to recover something like a session Cookie. Triple-DES, which shows up as \u201cDES-CBC3\u201d in an OpenSSL cipher string, is still used on the Web, and major browsers are not yet willing to completely disable it. 
If you run a server, you should disable triple-DES. This is generally a configuration issue. If you run an old server that doesn\u2019t support any better ciphers than DES or RC4, you should upgrade. For 1.0.2 and 1.0.1, we removed the triple-DES ciphers from the \u201cHIGH\u201d keyword and put them into \u201cMEDIUM.\u201d Note that we did not remove them from the \u201cDEFAULT\u201d keyword. For the 1.1.0 release, we treat triple-DES just like we are treating RC4. It is not compiled by default; you have to use \u201cenable-weak-ssl-ciphers\u201d as a config option. Even when those ciphers are compiled, triple-DES is only in the \u201cMEDIUM\u201d keyword. In addition we also removed it from the \u201cDEFAULT\u201d keyword.", "affected_packages": [ { @@ -7157,7 +7157,7 @@ "weaknesses": [] }, { - "unique_content_id": "17585a9b090ed55460ac0cad6c3b5f6e", + "unique_content_id": "335b68ab52a7311993d1ba47eab3fb676dfaee4b10a497bd992f7cbcb13edd67", "summary": "In OpenSSL 1.0.2 and earlier some missing message length checks can result in OOB reads of up to 2 bytes beyond an allocated buffer. There is a theoretical DoS risk but this has not been observed in practice on common platforms. The messages affected are client certificate, client certificate request and server certificate. As a result the attack can only be performed against a client or a server which enables client authentication.", "affected_packages": [ { @@ -7221,7 +7221,53 @@ "weaknesses": [] }, { - "unique_content_id": "8ee9b8d2efa51108b44de0e5f0671902", + "unique_content_id": "4eaf09765e65f727f0d87c8ee1340fb5157d0195ebc4d87f7d243b7de0540731", + "summary": "A TLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. 
A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. However, the excessive message length check still takes place, and this would cause the connection to immediately fail. Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. 
However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service.", + "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.1.0a", + "affected_version_range": "vers:openssl/1.1.0" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6307", + "severities": [], + "reference_id": "CVE-2016-6307", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/4b390b6c3f8df925dc92a3dd6b022baa9a2f4650", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20160922.txt", + "severities": [ + { + "value": "Low", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2016-09-21T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "afa659ff3079acf90f6cbb1cc37cf907f479517ed3f41f22ff048c397dccc1e5", "summary": "A DTLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. However, the excessive message length check still takes place, and this would cause the connection to immediately fail. 
Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service.", "affected_packages": [ { @@ -7267,8 +7313,8 @@ "weaknesses": [] }, { - "unique_content_id": "f598dbb4cacf63ed93e588c1db8ff5b8", - "summary": "A TLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. 
However, the excessive message length check still takes place, and this would cause the connection to immediately fail. Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service.", + "unique_content_id": "76c39ca965fe72efb7732d5f6a6388c6f20696a7d03dcbe47c0c548a60e41ca8", + "summary": "OpenSSL 1.1.0 SSL/TLS will hang during a call to SSL_peek() if the peer sends an empty record. 
This could be exploited by a malicious peer in a Denial Of Service attack.", "affected_packages": [ { "package": { @@ -7285,13 +7331,13 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6307", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6305", "severities": [], - "reference_id": "CVE-2016-6307", + "reference_id": "CVE-2016-6305", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/4b390b6c3f8df925dc92a3dd6b022baa9a2f4650", + "url": "https://github.com/openssl/openssl/commit/63658103d4441924f8dbfc517b99bb54758a98b9", "severities": [], "reference_id": "", "reference_type": "" @@ -7300,7 +7346,7 @@ "url": "https://www.openssl.org/news/secadv/20160922.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -7309,11 +7355,11 @@ "reference_type": "" } ], - "date_published": "2016-09-21T00:00:00+00:00", + "date_published": "2016-09-22T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "3b3ff4143b6859104d216a310d58db58", + "unique_content_id": "aef1aa2ae8685c93c4869930f90ef8cd3bcc3fbadf949e4238d182e8fd2684a0", "summary": "A malicious client can send an excessively large OCSP Status Request extension. If that client continually requests renegotiation, sending a large OCSP Status Request extension each time, then there will be unbounded memory growth on the server. This will eventually lead to a Denial Of Service attack through memory exhaustion. Servers with a default configuration are vulnerable even if they do not support OCSP. Builds using the \"no-ocsp\" build time option are not affected. 
Servers using OpenSSL versions prior to 1.0.1g are not vulnerable in a default configuration, instead only if an application explicitly enables OCSP stapling support.", "affected_packages": [ { @@ -7395,8 +7441,8 @@ "weaknesses": [] }, { - "unique_content_id": "ec3000e978936c5dc59eeb71d14f61d0", - "summary": "OpenSSL 1.1.0 SSL/TLS will hang during a call to SSL_peek() if the peer sends an empty record. This could be exploited by a malicious peer in a Denial Of Service attack.", + "unique_content_id": "2c4faeb5ab598f4bcd7363261f979466b7ea8c02e2bfa6f68c53ef466d115f77", + "summary": "This issue only affects OpenSSL 1.1.0a, released on 22nd September 2016. The patch applied to address CVE-2016-6307 resulted in an issue where if a message larger than approx 16k is received then the underlying buffer to store the incoming message is reallocated and moved. Unfortunately a dangling pointer to the old location is left which results in an attempt to write to the previously freed location. This is likely to result in a crash, however it could potentially lead to execution of arbitrary code.", "affected_packages": [ { "package": { @@ -7407,28 +7453,28 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.0a", - "affected_version_range": "vers:openssl/1.1.0" + "fixed_version": "1.1.0b", + "affected_version_range": "vers:openssl/1.1.0a" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6305", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6309", "severities": [], - "reference_id": "CVE-2016-6305", + "reference_id": "CVE-2016-6309", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/63658103d4441924f8dbfc517b99bb54758a98b9", + "url": "https://github.com/openssl/openssl/commit/acacbfa7565c78d2273c0b2a2e5e803f44afefeb", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20160922.txt", + "url": 
"https://www.openssl.org/news/secadv/20160926.txt", "severities": [ { - "value": "Moderate", + "value": "Critical", "system": "generic_textual", "scoring_elements": "" } @@ -7437,11 +7483,11 @@ "reference_type": "" } ], - "date_published": "2016-09-22T00:00:00+00:00", + "date_published": "2016-09-26T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "76efc0216d0391eac89b5097852a6f7e", + "unique_content_id": "3dea46bb518540bd2524894a1c99e33282ca9610f926cb1a2a6ab62fee7e9a8a", "summary": "This issue only affects OpenSSL 1.0.2i, released on 22nd September 2016. A bug fix which included a CRL sanity check was added to OpenSSL 1.1.0 but was omitted from OpenSSL 1.0.2i. As a result any attempt to use CRLs in OpenSSL 1.0.2i will crash with a null pointer exception.", "affected_packages": [ { @@ -7487,8 +7533,8 @@ "weaknesses": [] }, { - "unique_content_id": "ec731ec05e8399f02edc888b078cfcf1", - "summary": "This issue only affects OpenSSL 1.1.0a, released on 22nd September 2016. The patch applied to address CVE-2016-6307 resulted in an issue where if a message larger than approx 16k is received then the underlying buffer to store the incoming message is reallocated and moved. Unfortunately a dangling pointer to the old location is left which results in an attempt to write to the previously freed location. This is likely to result in a crash, however it could potentially lead to execution of arbitrary code.", + "unique_content_id": "ced21cf334c43c1968b1e630d0e5e466fc113b5ec477a716a9d2328d84a41e08", + "summary": "Applications parsing invalid CMS structures can crash with a NULL pointer dereference. This is caused by a bug in the handling of the ASN.1 CHOICE type in OpenSSL 1.1.0 which can result in a NULL value being passed to the structure callback if an attempt is made to free certain invalid encodings. 
Only CHOICE structures using a callback which do not handle NULL value are affected.", "affected_packages": [ { "package": { @@ -7499,28 +7545,28 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.0b", - "affected_version_range": "vers:openssl/1.1.0a" + "fixed_version": "1.1.0c", + "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-6309", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-7053", "severities": [], - "reference_id": "CVE-2016-6309", + "reference_id": "CVE-2016-7053", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/acacbfa7565c78d2273c0b2a2e5e803f44afefeb", + "url": "https://github.com/openssl/openssl/commit/610b66267e41a32805ab54cbc580c5a6d5826cb4", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20160926.txt", + "url": "https://www.openssl.org/news/secadv/20161110.txt", "severities": [ { - "value": "Critical", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -7529,11 +7575,11 @@ "reference_type": "" } ], - "date_published": "2016-09-26T00:00:00+00:00", + "date_published": "2016-11-10T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "2260cd2fea019c35edd74053d43afbfa", + "unique_content_id": "de494f6e53a555a8a467bd0841b9b26accb6e568bdb3f941b8100b02f3325224", "summary": "TLS connections using *-CHACHA20-POLY1305 ciphersuites are susceptible to a DoS attack by corrupting larger payloads. This can result in an OpenSSL crash. This issue is not considered to be exploitable beyond a DoS.", "affected_packages": [ { @@ -7579,53 +7625,7 @@ "weaknesses": [] }, { - "unique_content_id": "ad064a076d4f4136c4ff5cc9a1c32cb4", - "summary": "Applications parsing invalid CMS structures can crash with a NULL pointer dereference. 
This is caused by a bug in the handling of the ASN.1 CHOICE type in OpenSSL 1.1.0 which can result in a NULL value being passed to the structure callback if an attempt is made to free certain invalid encodings. Only CHOICE structures using a callback which do not handle NULL value are affected.", - "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.1.0c", - "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-7053", - "severities": [], - "reference_id": "CVE-2016-7053", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/610b66267e41a32805ab54cbc580c5a6d5826cb4", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://www.openssl.org/news/secadv/20161110.txt", - "severities": [ - { - "value": "Moderate", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2016-11-10T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "c46f2f9d6517a007f907f8a2e4c84820", + "unique_content_id": "f65656f9a1a4be03cdd849a3aa82992f4af18eae8d67af063c16232e3f59f754", "summary": "There is a carry propagating bug in the Broadwell-specific Montgomery multiplication procedure that handles input lengths divisible by, but longer than 256 bits. Analysis suggests that attacks against RSA, DSA and DH private keys are impossible. This is because the subroutine in question is not used in operations with the private key itself and an input of the attacker's direct choice. Otherwise the bug can manifest itself as transient authentication and key negotiation failures or reproducible erroneous outcome of public-key operations with specially crafted input. 
Among EC algorithms only Brainpool P-512 curves are affected and one presumably can attack ECDH key negotiation. Impact was not analyzed in detail, because pre-requisites for attack are considered unlikely. Namely multiple clients have to choose the curve in question and the server has to share the private key among them, neither of which is default behaviour. Even then only clients that chose the curve will be affected.", "affected_packages": [ { @@ -7689,8 +7689,8 @@ "weaknesses": [] }, { - "unique_content_id": "6f703a0f132094abbd39fd883ed6e241", - "summary": "If a malicious server supplies bad parameters for a DHE or ECDHE key exchange then this can result in the client attempting to dereference a NULL pointer leading to a client crash. This could be exploited in a Denial of Service attack.", + "unique_content_id": "16f408917ccdd649067c701789a6f062b284973f8da7ae2ce42116010005ffc2", + "summary": "If an SSL/TLS server or client is running on a 32-bit host, and a specific cipher is being used, then a truncated packet can cause that server or client to perform an out-of-bounds read, usually resulting in a crash. For OpenSSL 1.1.0, the crash can be triggered when using CHACHA20/POLY1305; users should upgrade to 1.1.0d. 
For Openssl 1.0.2, the crash can be triggered when using RC4-MD5; users who have not disabled that algorithm should update to 1.0.2k", "affected_packages": [ { "package": { @@ -7703,17 +7703,35 @@ }, "fixed_version": "1.1.0d", "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2k", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3730", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3731", "severities": [], - "reference_id": "CVE-2017-3730", + "reference_id": "CVE-2017-3731", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/efbe126e3ebb9123ac9d058aa2bb044261342aaa", + "url": "https://github.com/openssl/openssl/commit/00d965474b22b54e4275232bc71ee0c699c5cd21", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/51d009043670a627d6abe66894126851cf3690e9", "severities": [], "reference_id": "", "reference_type": "" @@ -7735,8 +7753,8 @@ "weaknesses": [] }, { - "unique_content_id": "b14fc26f1382b65b58128617820053c3", - "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. 
An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites. Note: This issue is very similar to CVE-2015-3193 but must be treated as a separate problem.", + "unique_content_id": "a98bf1ba98e36233e2f7857bfdf284a1cedd8cfe0d07d9f913d8b075bee096f1", + "summary": "If a malicious server supplies bad parameters for a DHE or ECDHE key exchange then this can result in the client attempting to dereference a NULL pointer leading to a client crash. This could be exploited in a Denial of Service attack.", "affected_packages": [ { "package": { @@ -7749,35 +7767,17 @@ }, "fixed_version": "1.1.0d", "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.2k", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3732", - "severities": [], - "reference_id": "CVE-2017-3732", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/a59b90bf491410f1f2bc4540cc21f1980fd14c5b", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3730", "severities": [], - "reference_id": "", + "reference_id": "CVE-2017-3730", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/760d04342a495ee86bf5adc71a91d126af64397f", + "url": "https://github.com/openssl/openssl/commit/efbe126e3ebb9123ac9d058aa2bb044261342aaa", "severities": [], "reference_id": "", "reference_type": "" @@ -7799,8 +7799,8 @@ "weaknesses": [] }, { - "unique_content_id": "c271a33647e7cdefdce8ed38c15e1bb7", - 
"summary": "If an SSL/TLS server or client is running on a 32-bit host, and a specific cipher is being used, then a truncated packet can cause that server or client to perform an out-of-bounds read, usually resulting in a crash. For OpenSSL 1.1.0, the crash can be triggered when using CHACHA20/POLY1305; users should upgrade to 1.1.0d. For Openssl 1.0.2, the crash can be triggered when using RC4-MD5; users who have not disabled that algorithm should update to 1.0.2k", + "unique_content_id": "d525e54aa33322501d8c100f7be2df5900113d09c8409a50ce37b77478001f13", + "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites. 
Note: This issue is very similar to CVE-2015-3193 but must be treated as a separate problem.", "affected_packages": [ { "package": { @@ -7829,19 +7829,19 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3731", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3732", "severities": [], - "reference_id": "CVE-2017-3731", + "reference_id": "CVE-2017-3732", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/00d965474b22b54e4275232bc71ee0c699c5cd21", + "url": "https://github.com/openssl/openssl/commit/a59b90bf491410f1f2bc4540cc21f1980fd14c5b", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/51d009043670a627d6abe66894126851cf3690e9", + "url": "https://github.com/openssl/openssl/commit/760d04342a495ee86bf5adc71a91d126af64397f", "severities": [], "reference_id": "", "reference_type": "" @@ -7863,7 +7863,7 @@ "weaknesses": [] }, { - "unique_content_id": "e5c015c5ea09f74ca8830fc675109209", + "unique_content_id": "70561a8c52747e3882749934d40dc3c52e1a6fccd239925f1ff317779b30257e", "summary": "During a renegotiation handshake if the Encrypt-Then-Mac extension is negotiated where it was not in the original handshake (or vice-versa) then this can cause OpenSSL to crash (dependent on ciphersuite). Both clients and servers are affected.", "affected_packages": [ { @@ -7909,7 +7909,7 @@ "weaknesses": [] }, { - "unique_content_id": "3e3d332a535202d4a355d9c6f46f8511", + "unique_content_id": "6e51a8310007cae6d2dd2da43402f0ce33e9bc503675618ae3ed5e22435384c0", "summary": "While parsing an IPAdressFamily extension in an X.509 certificate, it is possible to do a one-byte overread. 
This would result in an incorrect text display of the certificate.", "affected_packages": [ { @@ -7973,7 +7973,7 @@ "weaknesses": [] }, { - "unique_content_id": "135805c0fbb3f388567abe5a782e3678", + "unique_content_id": "723a84486e608c93ef84d012c9a3bdbec50fc03f94b6af7f2e3c6db35c4870db", "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. This only affects processors that support the BMI1, BMI2 and ADX extensions like Intel Broadwell (5th generation) and later or AMD Ryzen.", "affected_packages": [ { @@ -8015,59 +8015,13 @@ "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/4443cf7aa0099e5ce615c18cee249fff77fb0871", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://www.openssl.org/news/secadv/20171102.txt", - "severities": [ - { - "value": "Moderate", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2017-11-02T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "37c832cd6a7a445e21de6bcaae2e6aad", - "summary": "OpenSSL 1.0.2 (starting from version 1.0.2b) introduced an \"error state\" mechanism. 
The intent was that if a fatal error occurred during a handshake then OpenSSL would move into the error state and would immediately fail if you attempted to continue the handshake. This works as designed for the explicit handshake functions (SSL_do_handshake(), SSL_accept() and SSL_connect()), however due to a bug it does not work correctly if SSL_read() or SSL_write() is called directly. In that scenario, if the handshake fails then a fatal error will be returned in the initial function call. If SSL_read()/SSL_write() is subsequently called by the application for the same SSL object then it will succeed and the data is passed without being decrypted/encrypted directly from the SSL/TLS record layer. In order to exploit this issue an application bug would have to be present that resulted in a call to SSL_read()/SSL_write() being issued after having already received a fatal error.", - "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.2n", - "affected_version_range": "vers:openssl/1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3737", - "severities": [], - "reference_id": "CVE-2017-3737", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/898fb884b706aaeb283de4812340bb0bde8476dc", + "url": "https://github.com/openssl/openssl/commit/4443cf7aa0099e5ce615c18cee249fff77fb0871", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20171207.txt", + "url": "https://www.openssl.org/news/secadv/20171102.txt", "severities": [ { "value": "Moderate", @@ -8079,11 +8033,11 @@ "reference_type": "" } ], - "date_published": "2017-12-07T00:00:00+00:00", + "date_published": "2017-11-02T00:00:00+00:00", "weaknesses": [] 
}, { - "unique_content_id": "fe526b02e32f024f79ab16ad59c5cd59", + "unique_content_id": "9855d6d4847a8dac0b2ec4a4a8556a921f9a32c035e43bb98f4201ab12df0d4c", "summary": "There is an overflow bug in the AVX2 Montgomery multiplication procedure used in exponentiation with 1024-bit moduli. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH1024 are considered just feasible, because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be significant. However, for an attack on TLS to be meaningful, the server would have to share the DH1024 private key among multiple clients, which is no longer an option since CVE-2016-0701. This only affects processors that support the AVX2 but not ADX extensions like Intel Haswell (4th generation). Note: The impact from this issue is similar to CVE-2017-3736, CVE-2017-3732 and CVE-2015-3193. Due to the low severity of this issue we are not issuing a new release of OpenSSL 1.1.0 at this time. The fix will be included in OpenSSL 1.1.0h when it becomes available. The fix is also available in commit e502cc86d in the OpenSSL git repository.", "affected_packages": [ { @@ -8147,8 +8101,8 @@ "weaknesses": [] }, { - "unique_content_id": "0add28e4bf2017a49afa086624548363", - "summary": "Because of an implementation bug the PA-RISC CRYPTO_memcmp function is effectively reduced to only comparing the least significant bit of each byte. This allows an attacker to forge messages that would be considered as authenticated in an amount of tries lower than that guaranteed by the security claims of the scheme. 
The module can only be compiled by the HP-UX assembler, so that only HP-UX PA-RISC targets are affected.", + "unique_content_id": "99b0a08fcb1d6012836e07da86ee39aec6568240922f58640fcb7c9b8f561492", + "summary": "OpenSSL 1.0.2 (starting from version 1.0.2b) introduced an \"error state\" mechanism. The intent was that if a fatal error occurred during a handshake then OpenSSL would move into the error state and would immediately fail if you attempted to continue the handshake. This works as designed for the explicit handshake functions (SSL_do_handshake(), SSL_accept() and SSL_connect()), however due to a bug it does not work correctly if SSL_read() or SSL_write() is called directly. In that scenario, if the handshake fails then a fatal error will be returned in the initial function call. If SSL_read()/SSL_write() is subsequently called by the application for the same SSL object then it will succeed and the data is passed without being decrypted/encrypted directly from the SSL/TLS record layer. 
In order to exploit this issue an application bug would have to be present that resulted in a call to SSL_read()/SSL_write() being issued after having already received a fatal error.", "affected_packages": [ { "package": { @@ -8159,25 +8113,25 @@ "namespace": "", "qualifiers": "" }, - "fixed_version": "1.1.0h", - "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g" + "fixed_version": "1.0.2n", + "affected_version_range": "vers:openssl/1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0733", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-3737", "severities": [], - "reference_id": "CVE-2018-0733", + "reference_id": "CVE-2017-3737", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/56d5a4bfcaf37fa420aef2bb881aa55e61cf5f2f", + "url": "https://github.com/openssl/openssl/commit/898fb884b706aaeb283de4812340bb0bde8476dc", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://www.openssl.org/news/secadv/20180327.txt", + "url": "https://www.openssl.org/news/secadv/20171207.txt", "severities": [ { "value": "Moderate", @@ -8189,11 +8143,11 @@ "reference_type": "" } ], - "date_published": "2018-03-27T00:00:00+00:00", + "date_published": "2017-12-07T00:00:00+00:00", "weaknesses": [] }, { - "unique_content_id": "fd56a1d08c404d18a2425bde4a2cc222", + "unique_content_id": "40730ed1276c0a934bcd453d832b1b05ea61d1aeddf8d2a88ed31efc6625e1c9", "summary": "Constructed ASN.1 types with a recursive definition (such as can be found in PKCS7) could eventually exceed the stack given malicious input with excessive recursion. This could result in a Denial Of Service attack. 
There are no such structures used within SSL/TLS that come from untrusted sources so this is considered safe.", "affected_packages": [ { @@ -8257,7 +8211,53 @@ "weaknesses": [] }, { - "unique_content_id": "5ce5c73a388c1721baa86dd346bc5cca", + "unique_content_id": "f62f0a22bd4695353076d3dc1b2e7670ed0bd9607d774a7cc31c86086cacb015", + "summary": "Because of an implementation bug the PA-RISC CRYPTO_memcmp function is effectively reduced to only comparing the least significant bit of each byte. This allows an attacker to forge messages that would be considered as authenticated in an amount of tries lower than that guaranteed by the security claims of the scheme. The module can only be compiled by the HP-UX assembler, so that only HP-UX PA-RISC targets are affected.", + "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.1.0h", + "affected_version_range": "vers:openssl/1.1.0|1.1.0a|1.1.0b|1.1.0c|1.1.0d|1.1.0e|1.1.0f|1.1.0g" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-0733", + "severities": [], + "reference_id": "CVE-2018-0733", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/56d5a4bfcaf37fa420aef2bb881aa55e61cf5f2f", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20180327.txt", + "severities": [ + { + "value": "Moderate", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2018-03-27T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "fdffac35f130aaa543d59035f255119efd80363f868a8aac69b3b5036d4e9052", "summary": "The OpenSSL RSA Key generation algorithm has been shown to be vulnerable to a cache timing side channel attack. 
An attacker with sufficient access to mount cache timing attacks during the RSA key generation process could recover the private key.", "affected_packages": [ { @@ -8321,7 +8321,7 @@ "weaknesses": [] }, { - "unique_content_id": "891a444705c4d9e9d6d9514e6152b93d", + "unique_content_id": "52b60416f56fbd4cf154ad29a878e1a745b607dcee1653acb5985fa68607508b", "summary": "During key agreement in a TLS handshake using a DH(E) based ciphersuite a malicious server can send a very large prime value to the client. This will cause the client to spend an unreasonably long period of time generating a key for this prime resulting in a hang until the client has finished. This could be exploited in a Denial Of Service attack.", "affected_packages": [ { @@ -8385,7 +8385,7 @@ "weaknesses": [] }, { - "unique_content_id": "3193861b88f934ec25c275d622932dc2", + "unique_content_id": "b208d67bce0a078a253edbd6b6f817f83f3c7e0f384dae57fd11f43aa6645a78", "summary": "The OpenSSL ECDSA signature algorithm has been shown to be vulnerable to a timing side channel attack. An attacker could use variations in the signing algorithm to recover the private key.", "affected_packages": [ { @@ -8449,7 +8449,7 @@ "weaknesses": [] }, { - "unique_content_id": "c6585613e6f674c7ea39eefc5057e85d", + "unique_content_id": "d2c4e2cf5d78c3a480feea4e1721e0acbb60155c70d8b6a30a282b546f09afcf", "summary": "The OpenSSL DSA signature algorithm has been shown to be vulnerable to a timing side channel attack. An attacker could use variations in the signing algorithm to recover the private key.", "affected_packages": [ { @@ -8531,7 +8531,7 @@ "weaknesses": [] }, { - "unique_content_id": "a86eaada3e2c85065180d5d7eb1d3a31", + "unique_content_id": "dd129503db8d87d87f40d36a21b3e7ad7a51515303ca1ddff0a7722bf6b6b809", "summary": "OpenSSL ECC scalar multiplication, used in e.g. ECDSA and ECDH, has been shown to be vulnerable to a microarchitecture timing side channel attack. 
An attacker with sufficient access to mount local timing attacks during ECDSA signature generation could recover the private key.", "affected_packages": [ { @@ -8595,7 +8595,7 @@ "weaknesses": [] }, { - "unique_content_id": "bd17aac4dde8bee4fba0c673c8287082", + "unique_content_id": "d2ba9b6bba240765f8121e99e081e43b48475b118a7c16aed6cc5556d5b6be89", "summary": "If an application encounters a fatal protocol error and then calls SSL_shutdown() twice (once to send a close_notify, and once to receive one) then OpenSSL can respond differently to the calling application if a 0 byte record is received with invalid padding compared to if a 0 byte record is received with an invalid MAC. If the application then behaves differently based on that in a way that is detectable to the remote peer, then this amounts to a padding oracle that could be used to decrypt data. In order for this to be exploitable \"non-stitched\" ciphersuites must be in use. Stitched ciphersuites are optimised implementations of certain commonly used ciphersuites. Also the application must call SSL_shutdown() twice even if a protocol error has occurred (applications should not do this but some do anyway). AEAD ciphersuites are not impacted.", "affected_packages": [ { @@ -8641,7 +8641,7 @@ "weaknesses": [] }, { - "unique_content_id": "939439dfee2c7c3ef79f3f7fa3e5f90b", + "unique_content_id": "a182ef84f10d8869b39936326cf01831942571fde976d293e8cbb7f9182371de", "summary": "ChaCha20-Poly1305 is an AEAD cipher, and requires a unique nonce input for every encryption operation. RFC 7539 specifies that the nonce value (IV) should be 96 bits (12 bytes). OpenSSL allows a variable nonce length and front pads the nonce with 0 bytes if it is less than 12 bytes. However it also incorrectly allows a nonce to be set of up to 16 bytes. In this case only the last 12 bytes are significant and any additional leading bytes are ignored. It is a requirement of using this cipher that nonce values are unique. 
Messages encrypted using a reused nonce value are susceptible to serious confidentiality and integrity attacks. If an application changes the default nonce length to be longer than 12 bytes and then makes a change to the leading bytes of the nonce expecting the new value to be a new unique nonce then such an application could inadvertently encrypt messages with a reused nonce. Additionally the ignored bytes in a long nonce are not covered by the integrity guarantee of this cipher. Any application that relies on the integrity of these ignored leading bytes of a long nonce may be further affected. Any OpenSSL internal use of this cipher, including in SSL/TLS, is safe because no such use sets such a long nonce value. However user applications that use this cipher directly and set a non-default nonce length to be longer than 12 bytes may be vulnerable. OpenSSL versions 1.1.1 and 1.1.0 are affected by this issue. Due to the limited scope of affected deployments this has been assessed as low severity and therefore we are not creating new releases at this time.", "affected_packages": [ { @@ -8705,7 +8705,7 @@ "weaknesses": [] }, { - "unique_content_id": "4213f363ba037058475897c693173044", + "unique_content_id": "edd85067182fe9c90b55fc43bfb734f907e3209f959fe776fbca8d96c71accb6", "summary": "OpenSSL has internal defaults for a directory tree where it can find a configuration file as well as certificates used for verification in TLS. This directory is most commonly referred to as OPENSSLDIR, and is configurable with the --prefix / --openssldir configuration options. For OpenSSL versions 1.1.0 and 1.1.1, the mingw configuration targets assume that resulting programs and libraries are installed in a Unix-like environment and the default prefix for program installation as well as for OPENSSLDIR should be '/usr/local'. 
However, mingw programs are Windows programs, and as such, find themselves looking at sub-directories of 'C:/usr/local', which may be world writable, which enables untrusted users to modify OpenSSL's default configuration, insert CA certificates, modify (or even replace) existing engine modules, etc. For OpenSSL 1.0.2, '/usr/local/ssl' is used as default for OPENSSLDIR on all Unix and Windows targets, including Visual C builds. However, some build instructions for the diverse Windows targets on 1.0.2 encourage you to specify your own --prefix. OpenSSL versions 1.1.1, 1.1.0 and 1.0.2 are affected by this issue. Due to the limited scope of affected deployments this has been assessed as low severity and therefore we are not creating new releases at this time.", "affected_packages": [ { @@ -8793,53 +8793,7 @@ "weaknesses": [] }, { - "unique_content_id": "05226413367dc1d93fc68106f47a330c", - "summary": "OpenSSL 1.1.1 introduced a rewritten random number generator (RNG). This was intended to include protection in the event of a fork() system call in order to ensure that the parent and child processes did not share the same RNG state. However this protection was not being used in the default case. A partial mitigation for this issue is that the output from a high precision timer is mixed into the RNG state so the likelihood of a parent and child process sharing state is significantly reduced. 
If an application already calls OPENSSL_init_crypto() explicitly using OPENSSL_INIT_ATFORK then this problem does not occur at all.", - "affected_packages": [ - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.1.1d", - "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c" - } - ], - "references": [ - { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1549", - "severities": [], - "reference_id": "CVE-2019-1549", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/1b0fe00e2704b5e20334a16d3c9099d1ba2ef1be", - "severities": [], - "reference_id": "", - "reference_type": "" - }, - { - "url": "https://www.openssl.org/news/secadv/20190910.txt", - "severities": [ - { - "value": "Low", - "system": "generic_textual", - "scoring_elements": "" - } - ], - "reference_id": "", - "reference_type": "" - } - ], - "date_published": "2019-09-10T00:00:00+00:00", - "weaknesses": [] - }, - { - "unique_content_id": "45ac1a1229fc8b49656c3e6fd99221cd", + "unique_content_id": "07c966215a883c2032c38139472d5ff371ad61b8affa5e951c49f96438ab07cc", "summary": "In situations where an attacker receives automated notification of the success or failure of a decryption attempt an attacker, after sending a very large number of messages to be decrypted, can recover a CMS/PKCS7 transported encryption key or decrypt any RSA encrypted message that was encrypted with the public RSA key, using a Bleichenbacher padding oracle attack. 
Applications are not affected if they use a certificate together with the private RSA key to the CMS_decrypt or PKCS7_decrypt functions to select the correct recipient info to decrypt.", "affected_packages": [ { @@ -8921,7 +8875,7 @@ "weaknesses": [] }, { - "unique_content_id": "c251f1e3c85429b0daa07cb6ea7d1e67", + "unique_content_id": "1608445a20cee1c7f70bf4d4567f869870a5bda078ae3054db819f7197868284", "summary": "Normally in OpenSSL EC groups always have a co-factor present and this is used in side channel resistant code paths. However, in some cases, it is possible to construct a group using explicit parameters (instead of using a named curve). In those cases it is possible that such a group does not have the cofactor present. This can occur even where all the parameters match a known named curve. If such a curve is used then OpenSSL falls back to non-side channel resistant code paths which may result in full key recovery during an ECDSA signature operation. In order to be vulnerable an attacker would have to have the ability to time the creation of a large number of signatures where explicit parameters with no co-factor present are in use by an application using libcrypto. For the avoidance of doubt libssl is not vulnerable because explicit parameters are never used.", "affected_packages": [ { @@ -9003,7 +8957,53 @@ "weaknesses": [] }, { - "unique_content_id": "70a045decd4328c7ff88c8a1d969e8c4", + "unique_content_id": "efa3c01bca1f8857f755aac0413f6b448077604f02470f2887ccf96682279dba", + "summary": "OpenSSL 1.1.1 introduced a rewritten random number generator (RNG). This was intended to include protection in the event of a fork() system call in order to ensure that the parent and child processes did not share the same RNG state. However this protection was not being used in the default case. 
A partial mitigation for this issue is that the output from a high precision timer is mixed into the RNG state so the likelihood of a parent and child process sharing state is significantly reduced. If an application already calls OPENSSL_init_crypto() explicitly using OPENSSL_INIT_ATFORK then this problem does not occur at all.", + "affected_packages": [ + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.1.1d", + "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c" + } + ], + "references": [ + { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2019-1549", + "severities": [], + "reference_id": "CVE-2019-1549", + "reference_type": "" + }, + { + "url": "https://github.com/openssl/openssl/commit/1b0fe00e2704b5e20334a16d3c9099d1ba2ef1be", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": "https://www.openssl.org/news/secadv/20190910.txt", + "severities": [ + { + "value": "Low", + "system": "generic_textual", + "scoring_elements": "" + } + ], + "reference_id": "", + "reference_type": "" + } + ], + "date_published": "2019-09-10T00:00:00+00:00", + "weaknesses": [] + }, + { + "unique_content_id": "1386c9f10ab439a308d3b6c4bfa71d7f17de4bb9b041d065029c422a0d559caf", "summary": "There is an overflow bug in the x64_64 Montgomery squaring procedure used in exponentiation with 512-bit moduli. No EC algorithms are affected. Analysis suggests that attacks against 2-prime RSA1024, 3-prime RSA1536, and DSA1024 as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH512 are considered just feasible. However, for an attack the target would have to re-use the DH512 private key, which is not recommended anyway. 
Also applications directly using the low level API BN_mod_exp may be affected if they use BN_FLG_CONSTTIME.", "affected_packages": [ { @@ -9067,7 +9067,7 @@ "weaknesses": [] }, { - "unique_content_id": "afb9d94adcf86f7b0de8aa4f7ff7c6b4", + "unique_content_id": "5657b64f70e97033e61583196c24a7a9e4b643cd241052028cb0a2b764adbe7e", "summary": "Server or client applications that call the SSL_check_chain() function during or after a TLS 1.3 handshake may crash due to a NULL pointer dereference as a result of incorrect handling of the \"signature_algorithms_cert\" TLS extension. The crash occurs if an invalid or unrecognised signature algorithm is received from the peer. This could be exploited by a malicious peer in a Denial of Service attack. OpenSSL version 1.1.1d, 1.1.1e, and 1.1.1f are affected by this issue. This issue did not affect OpenSSL versions prior to 1.1.1d.", "affected_packages": [ { @@ -9113,7 +9113,7 @@ "weaknesses": [] }, { - "unique_content_id": "56010436497977628dcea6e96888d450", + "unique_content_id": "8291dd784cec9b49787a85058d536e4d4c9a136bdc21bcfb7e975a2c41218195", "summary": "The Raccoon attack exploits a flaw in the TLS specification which can lead to an attacker being able to compute the pre-master secret in connections which have used a Diffie-Hellman (DH) based ciphersuite. In such a case this would result in the attacker being able to eavesdrop on all encrypted communications sent over that TLS connection. The attack can only be exploited if an implementation re-uses a DH secret across multiple TLS connections. Note that this issue only impacts DH ciphersuites and not ECDH ciphersuites. This issue affects OpenSSL 1.0.2 which is out of support and no longer receiving public updates. 
OpenSSL 1.1.1 is not vulnerable to this issue.", "affected_packages": [ { @@ -9153,7 +9153,7 @@ "weaknesses": [] }, { - "unique_content_id": "87b17158b6ad69a4d8043755547f45ad", + "unique_content_id": "a5da7dab57b99ce22236cb42d5329a816d2abf2481a6857c5b4ce16acb8b940f", "summary": "The X.509 GeneralName type is a generic type for representing different types of names. One of those name types is known as EDIPartyName. OpenSSL provides a function GENERAL_NAME_cmp which compares different instances of a GENERAL_NAME to see if they are equal or not. This function behaves incorrectly when both GENERAL_NAMEs contain an EDIPARTYNAME. A NULL pointer dereference and a crash may occur leading to a possible denial of service attack. OpenSSL itself uses the GENERAL_NAME_cmp function for two purposes: 1) Comparing CRL distribution point names between an available CRL and a CRL distribution point embedded in an X509 certificate 2) When verifying that a timestamp response token signer matches the timestamp authority name (exposed via the API functions TS_RESP_verify_response and TS_RESP_verify_token) If an attacker can control both items being compared then that attacker could trigger a crash. For example if the attacker can trick a client or server into checking a malicious certificate against a malicious CRL then this may occur. Note that some applications automatically download CRLs based on a URL embedded in a certificate. This checking happens prior to the signatures on the certificate and CRL being verified. OpenSSL's s_server, s_client and verify tools have support for the \"-crl_download\" option which implements automatic CRL downloading and this attack has been demonstrated to work against those tools. Note that an unrelated bug means that affected versions of OpenSSL cannot parse or construct correct encodings of EDIPARTYNAME. However it is possible to construct a malformed EDIPARTYNAME that OpenSSL's parser will accept and hence trigger this attack. 
All OpenSSL 1.1.1 and 1.0.2 versions are affected by this issue. Other OpenSSL releases are out of support and have not been checked.", "affected_packages": [ { @@ -9217,8 +9217,8 @@ "weaknesses": [] }, { - "unique_content_id": "510307f6edf17f0620c4a096bb61df0c", - "summary": "The OpenSSL public API function X509_issuer_and_serial_hash() attempts to create a unique hash value based on the issuer and serial number data contained within an X509 certificate. However it fails to correctly handle any errors that may occur while parsing the issuer field (which might occur if the issuer field is maliciously constructed). This may subsequently result in a NULL pointer deref and a crash leading to a potential denial of service attack. The function X509_issuer_and_serial_hash() is never directly called by OpenSSL itself so applications are only vulnerable if they use this function directly and they use it on certificates that may have been obtained from untrusted sources. OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j.", + "unique_content_id": "37127413ec3efbf57f25327ebbe739c46f14fb0992651a32236c3fc60a12e4a4", + "summary": "Calls to EVP_CipherUpdate, EVP_EncryptUpdate and EVP_DecryptUpdate may overflow the output length argument in some cases where the input length is close to the maximum permissable length for an integer on the platform. In such cases the return value from the function call will be 1 (indicating success), but the output length value will be negative. This could cause applications to behave incorrectly or crash. OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. 
OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j.", "affected_packages": [ { "package": { @@ -9247,19 +9247,19 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23841", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23840", "severities": [], - "reference_id": "CVE-2021-23841", + "reference_id": "CVE-2021-23840", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/122a19ab48091c657f7cb1fb3af9fc07bd557bbf", + "url": "https://github.com/openssl/openssl/commit/6a51b9e1d0cf0bf8515f7201b68fb0a3482b3dc1", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/8252ee4d90f3f2004d3d0aeeed003ad49c9a7807", + "url": "https://github.com/openssl/openssl/commit/9b1129239f3ebb1d1c98ce9ed41d5c9476c47cb2", "severities": [], "reference_id": "", "reference_type": "" @@ -9268,7 +9268,7 @@ "url": "https://www.openssl.org/news/secadv/20210216.txt", "severities": [ { - "value": "Moderate", + "value": "Low", "system": "generic_textual", "scoring_elements": "" } @@ -9281,8 +9281,8 @@ "weaknesses": [] }, { - "unique_content_id": "62778ba1713cdf9851ef92f4d2f46fa7", - "summary": "Calls to EVP_CipherUpdate, EVP_EncryptUpdate and EVP_DecryptUpdate may overflow the output length argument in some cases where the input length is close to the maximum permissable length for an integer on the platform. In such cases the return value from the function call will be 1 (indicating success), but the output length value will be negative. This could cause applications to behave incorrectly or crash. OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. 
OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j.", + "unique_content_id": "4f747fd9c1e01f00c514c9af30852970db6eb6c9b83462affe737ebd3b893a0d", + "summary": "The OpenSSL public API function X509_issuer_and_serial_hash() attempts to create a unique hash value based on the issuer and serial number data contained within an X509 certificate. However it fails to correctly handle any errors that may occur while parsing the issuer field (which might occur if the issuer field is maliciously constructed). This may subsequently result in a NULL pointer deref and a crash leading to a potential denial of service attack. The function X509_issuer_and_serial_hash() is never directly called by OpenSSL itself so applications are only vulnerable if they use this function directly and they use it on certificates that may have been obtained from untrusted sources. OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. 
Other users should upgrade to 1.1.1j.", "affected_packages": [ { "package": { @@ -9311,19 +9311,19 @@ ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23840", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-23841", "severities": [], - "reference_id": "CVE-2021-23840", + "reference_id": "CVE-2021-23841", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/6a51b9e1d0cf0bf8515f7201b68fb0a3482b3dc1", + "url": "https://github.com/openssl/openssl/commit/122a19ab48091c657f7cb1fb3af9fc07bd557bbf", "severities": [], "reference_id": "", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/9b1129239f3ebb1d1c98ce9ed41d5c9476c47cb2", + "url": "https://github.com/openssl/openssl/commit/8252ee4d90f3f2004d3d0aeeed003ad49c9a7807", "severities": [], "reference_id": "", "reference_type": "" @@ -9332,7 +9332,7 @@ "url": "https://www.openssl.org/news/secadv/20210216.txt", "severities": [ { - "value": "Low", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -9345,7 +9345,7 @@ "weaknesses": [] }, { - "unique_content_id": "ebbc5ad78a20128d4894106ef368c8f1", + "unique_content_id": "d94a89c4d33239b6b8b49b765224bdb2cff846ce52cf8d1bfd59e6401fd406d7", "summary": "OpenSSL 1.0.2 supports SSLv2. If a client attempts to negotiate SSLv2 with a server that is configured to support both SSLv2 and more recent SSL and TLS versions then a check is made for a version rollback attack when unpadding an RSA signature. Clients that support SSL or TLS versions greater than SSLv2 are supposed to use a special form of padding. A server that supports greater than SSLv2 is supposed to reject connection attempts from a client where this special form of padding is present, because this indicates that a version rollback has occurred (i.e. both client and server support greater than SSLv2, and yet this is the version that is being requested). 
The implementation of this padding check inverted the logic so that the connection attempt is accepted if the padding is present, and rejected if it is absent. This means that such as server will accept a connection if a version rollback attack has occurred. Further the server will erroneously reject a connection if a normal SSLv2 connection attempt is made. Only OpenSSL 1.0.2 servers from version 1.0.2s to 1.0.2x are affected by this issue. In order to be vulnerable a 1.0.2 server must: 1) have configured SSLv2 support at compile time (this is off by default), 2) have configured SSLv2 support at runtime (this is off by default), 3) have configured SSLv2 ciphersuites (these are not in the default ciphersuite list) OpenSSL 1.1.1 does not have SSLv2 support and therefore is not vulnerable to this issue. The underlying error is in the implementation of the RSA_padding_check_SSLv23() function. This also affects the RSA_SSLV23_PADDING padding mode used by various other functions. Although 1.1.1 does not support SSLv2 the RSA_padding_check_SSLv23() function still exists, as does the RSA_SSLV23_PADDING padding mode. Applications that directly call that function or use that padding mode will encounter this issue. However since there is no support for the SSLv2 protocol in 1.1.1 this is considered a bug and not a security issue in that version. OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j.", "affected_packages": [ { @@ -9391,8 +9391,8 @@ "weaknesses": [] }, { - "unique_content_id": "8017a45e047c6a8a07ddcef5b019a5a9", - "summary": "The X509_V_FLAG_X509_STRICT flag enables additional security checks of the certificates present in a certificate chain. It is not set by default. 
Starting from OpenSSL version 1.1.1h a check to disallow certificates in the chain that have explicitly encoded elliptic curve parameters was added as an additional strict check. An error in the implementation of this check meant that the result of a previous check to confirm that certificates in the chain are valid CA certificates was overwritten. This effectively bypasses the check that non-CA certificates must not be able to issue other certificates. If a \"purpose\" has been configured then there is a subsequent opportunity for checks that the certificate is a valid CA. All of the named \"purpose\" values implemented in libcrypto perform this check. Therefore, where a purpose is set the certificate chain will still be rejected even when the strict flag has been used. A purpose is set by default in libssl client and server certificate verification routines, but it can be overridden or removed by an application. In order to be affected, an application must explicitly set the X509_V_FLAG_X509_STRICT verification flag and either not set a purpose for the certificate verification or, in the case of TLS client or server applications, override the default purpose. OpenSSL versions 1.1.1h and newer are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue.", + "unique_content_id": "7e96ee7be9f83a18c1773a7c46610f55024cfbe0be196a47e2b3ea741ae398e2", + "summary": "An OpenSSL TLS server may crash if sent a maliciously crafted renegotiation ClientHello message from a client. If a TLSv1.2 renegotiation ClientHello omits the signature_algorithms extension (where it was present in the initial ClientHello), but includes a signature_algorithms_cert extension then a NULL pointer dereference will result, leading to a crash and a denial of service attack. A server is only vulnerable if it has TLSv1.2 and renegotiation enabled (which is the default configuration). 
OpenSSL TLS clients are not impacted by this issue. All OpenSSL 1.1.1 versions are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue.", "affected_packages": [ { "package": { @@ -9404,18 +9404,18 @@ "qualifiers": "" }, "fixed_version": "1.1.1k", - "affected_version_range": "vers:openssl/1.1.1h|1.1.1i|1.1.1j" + "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c|1.1.1d|1.1.1e|1.1.1f|1.1.1g|1.1.1h|1.1.1i|1.1.1j" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3450", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3449", "severities": [], - "reference_id": "CVE-2021-3450", + "reference_id": "CVE-2021-3449", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/2a40b7bc7b94dd7de897a74571e7024f0cf0d63b", + "url": "https://github.com/openssl/openssl/commit/fb9fa6b51defd48157eeb207f52181f735d96148", "severities": [], "reference_id": "", "reference_type": "" @@ -9437,8 +9437,8 @@ "weaknesses": [] }, { - "unique_content_id": "b9610772604a38aae37934639b563f2d", - "summary": "An OpenSSL TLS server may crash if sent a maliciously crafted renegotiation ClientHello message from a client. If a TLSv1.2 renegotiation ClientHello omits the signature_algorithms extension (where it was present in the initial ClientHello), but includes a signature_algorithms_cert extension then a NULL pointer dereference will result, leading to a crash and a denial of service attack. A server is only vulnerable if it has TLSv1.2 and renegotiation enabled (which is the default configuration). OpenSSL TLS clients are not impacted by this issue. All OpenSSL 1.1.1 versions are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. 
OpenSSL 1.0.2 is not impacted by this issue.", + "unique_content_id": "943c0441bb44156232628b06433f25a6e1d5c4bef1db447845be8bb595d55320", + "summary": "The X509_V_FLAG_X509_STRICT flag enables additional security checks of the certificates present in a certificate chain. It is not set by default. Starting from OpenSSL version 1.1.1h a check to disallow certificates in the chain that have explicitly encoded elliptic curve parameters was added as an additional strict check. An error in the implementation of this check meant that the result of a previous check to confirm that certificates in the chain are valid CA certificates was overwritten. This effectively bypasses the check that non-CA certificates must not be able to issue other certificates. If a \"purpose\" has been configured then there is a subsequent opportunity for checks that the certificate is a valid CA. All of the named \"purpose\" values implemented in libcrypto perform this check. Therefore, where a purpose is set the certificate chain will still be rejected even when the strict flag has been used. A purpose is set by default in libssl client and server certificate verification routines, but it can be overridden or removed by an application. In order to be affected, an application must explicitly set the X509_V_FLAG_X509_STRICT verification flag and either not set a purpose for the certificate verification or, in the case of TLS client or server applications, override the default purpose. OpenSSL versions 1.1.1h and newer are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. 
OpenSSL 1.0.2 is not impacted by this issue.", "affected_packages": [ { "package": { @@ -9450,18 +9450,18 @@ "qualifiers": "" }, "fixed_version": "1.1.1k", - "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c|1.1.1d|1.1.1e|1.1.1f|1.1.1g|1.1.1h|1.1.1i|1.1.1j" + "affected_version_range": "vers:openssl/1.1.1h|1.1.1i|1.1.1j" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3449", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3450", "severities": [], - "reference_id": "CVE-2021-3449", + "reference_id": "CVE-2021-3450", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/fb9fa6b51defd48157eeb207f52181f735d96148", + "url": "https://github.com/openssl/openssl/commit/2a40b7bc7b94dd7de897a74571e7024f0cf0d63b", "severities": [], "reference_id": "", "reference_type": "" @@ -9483,8 +9483,8 @@ "weaknesses": [] }, { - "unique_content_id": "7c59ebbda08fad46ad3628c58c6e1f4f", - "summary": "In order to decrypt SM2 encrypted data an application is expected to call the API function EVP_PKEY_decrypt(). Typically an application will call this function twice. The first time, on entry, the \"out\" parameter can be NULL and, on exit, the \"outlen\" parameter is populated with the buffer size required to hold the decrypted plaintext. The application can then allocate a sufficiently sized buffer and call EVP_PKEY_decrypt() again, but this time passing a non-NULL value for the \"out\" parameter. A bug in the implementation of the SM2 decryption code means that the calculation of the buffer size required to hold the plaintext returned by the first call to EVP_PKEY_decrypt() can be smaller than the actual size required by the second call. This can lead to a buffer overflow when EVP_PKEY_decrypt() is called by the application a second time with a buffer that is too small. 
A malicious attacker who is able present SM2 content for decryption to an application could cause attacker chosen data to overflow the buffer by up to a maximum of 62 bytes altering the contents of other data held after the buffer, possibly changing application behaviour or causing the application to crash. The location of the buffer is application dependent but is typically heap allocated.", + "unique_content_id": "ac1bc5a0f0673f7e6556dd40ca607825904051f6f1686650f07ba5727dcab9f9", + "summary": "ASN.1 strings are represented internally within OpenSSL as an ASN1_STRING structure which contains a buffer holding the string data and a field holding the buffer length. This contrasts with normal C strings which are repesented as a buffer for the string data which is terminated with a NUL (0) byte. Although not a strict requirement, ASN.1 strings that are parsed using OpenSSL's own \"d2i\" functions (and other similar parsing functions) as well as any string whose value has been set with the ASN1_STRING_set() function will additionally NUL terminate the byte array in the ASN1_STRING structure. However, it is possible for applications to directly construct valid ASN1_STRING structures which do not NUL terminate the byte array by directly setting the \"data\" and \"length\" fields in the ASN1_STRING array. This can also happen by using the ASN1_STRING_set0() function. Numerous OpenSSL functions that print ASN.1 data have been found to assume that the ASN1_STRING byte array will be NUL terminated, even though this is not guaranteed for strings that have been directly constructed. Where an application requests an ASN.1 structure to be printed, and where that ASN.1 structure contains ASN1_STRINGs that have been directly constructed by the application without NUL terminating the \"data\" field, then a read buffer overrun can occur. 
The same thing can also occur during name constraints processing of certificates (for example if a certificate has been directly constructed by the application instead of loading it via the OpenSSL parsing functions, and the certificate contains non NUL terminated ASN1_STRING structures). It can also occur in the X509_get1_email(), X509_REQ_get1_email() and X509_get1_ocsp() functions. If a malicious actor can cause an application to directly construct an ASN1_STRING and then process it through one of the affected OpenSSL functions then this issue could be hit. This might result in a crash (causing a Denial of Service attack). It could also result in the disclosure of private memory contents (such as private keys, or sensitive plaintext).", "affected_packages": [ { "package": { @@ -9497,17 +9497,35 @@ }, "fixed_version": "1.1.1l", "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c|1.1.1d|1.1.1e|1.1.1f|1.1.1g|1.1.1h|1.1.1i|1.1.1j|1.1.1k" + }, + { + "package": { + "name": "openssl", + "type": "openssl", + "subpath": "", + "version": "", + "namespace": "", + "qualifiers": "" + }, + "fixed_version": "1.0.2za", + "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p|1.0.2q|1.0.2r|1.0.2s|1.0.2t|1.0.2u|1.0.2v|1.0.2w|1.0.2x|1.0.2y" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3711", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3712", "severities": [], - "reference_id": "CVE-2021-3711", + "reference_id": "CVE-2021-3712", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/59f5e75f3bced8fc0e130d72a3f582cf7b480b46", + "url": "https://github.com/openssl/openssl/commit/94d23fcff9b2a7a8368dfe52214d5c2569882c11", + "severities": [], + "reference_id": "", + "reference_type": "" + }, + { + "url": 
"https://github.com/openssl/openssl/commit/ccb0a11145ee72b042d10593a64eaf9e8a55ec12", "severities": [], "reference_id": "", "reference_type": "" @@ -9516,7 +9534,7 @@ "url": "https://www.openssl.org/news/secadv/20210824.txt", "severities": [ { - "value": "High", + "value": "Moderate", "system": "generic_textual", "scoring_elements": "" } @@ -9529,8 +9547,8 @@ "weaknesses": [] }, { - "unique_content_id": "97ca2e1d473bc9e2e802285c56f85541", - "summary": "ASN.1 strings are represented internally within OpenSSL as an ASN1_STRING structure which contains a buffer holding the string data and a field holding the buffer length. This contrasts with normal C strings which are repesented as a buffer for the string data which is terminated with a NUL (0) byte. Although not a strict requirement, ASN.1 strings that are parsed using OpenSSL's own \"d2i\" functions (and other similar parsing functions) as well as any string whose value has been set with the ASN1_STRING_set() function will additionally NUL terminate the byte array in the ASN1_STRING structure. However, it is possible for applications to directly construct valid ASN1_STRING structures which do not NUL terminate the byte array by directly setting the \"data\" and \"length\" fields in the ASN1_STRING array. This can also happen by using the ASN1_STRING_set0() function. Numerous OpenSSL functions that print ASN.1 data have been found to assume that the ASN1_STRING byte array will be NUL terminated, even though this is not guaranteed for strings that have been directly constructed. Where an application requests an ASN.1 structure to be printed, and where that ASN.1 structure contains ASN1_STRINGs that have been directly constructed by the application without NUL terminating the \"data\" field, then a read buffer overrun can occur. 
The same thing can also occur during name constraints processing of certificates (for example if a certificate has been directly constructed by the application instead of loading it via the OpenSSL parsing functions, and the certificate contains non NUL terminated ASN1_STRING structures). It can also occur in the X509_get1_email(), X509_REQ_get1_email() and X509_get1_ocsp() functions. If a malicious actor can cause an application to directly construct an ASN1_STRING and then process it through one of the affected OpenSSL functions then this issue could be hit. This might result in a crash (causing a Denial of Service attack). It could also result in the disclosure of private memory contents (such as private keys, or sensitive plaintext).", + "unique_content_id": "b2e254e7e251e702fd77c5eaf069909ab6e7ddf360fc3ff323ee75dc20566220", + "summary": "In order to decrypt SM2 encrypted data an application is expected to call the API function EVP_PKEY_decrypt(). Typically an application will call this function twice. The first time, on entry, the \"out\" parameter can be NULL and, on exit, the \"outlen\" parameter is populated with the buffer size required to hold the decrypted plaintext. The application can then allocate a sufficiently sized buffer and call EVP_PKEY_decrypt() again, but this time passing a non-NULL value for the \"out\" parameter. A bug in the implementation of the SM2 decryption code means that the calculation of the buffer size required to hold the plaintext returned by the first call to EVP_PKEY_decrypt() can be smaller than the actual size required by the second call. This can lead to a buffer overflow when EVP_PKEY_decrypt() is called by the application a second time with a buffer that is too small. 
A malicious attacker who is able present SM2 content for decryption to an application could cause attacker chosen data to overflow the buffer by up to a maximum of 62 bytes altering the contents of other data held after the buffer, possibly changing application behaviour or causing the application to crash. The location of the buffer is application dependent but is typically heap allocated.", "affected_packages": [ { "package": { @@ -9543,35 +9561,17 @@ }, "fixed_version": "1.1.1l", "affected_version_range": "vers:openssl/1.1.1|1.1.1a|1.1.1b|1.1.1c|1.1.1d|1.1.1e|1.1.1f|1.1.1g|1.1.1h|1.1.1i|1.1.1j|1.1.1k" - }, - { - "package": { - "name": "openssl", - "type": "openssl", - "subpath": "", - "version": "", - "namespace": "", - "qualifiers": "" - }, - "fixed_version": "1.0.2za", - "affected_version_range": "vers:openssl/1.0.2|1.0.2a|1.0.2b|1.0.2c|1.0.2d|1.0.2e|1.0.2f|1.0.2g|1.0.2h|1.0.2i|1.0.2j|1.0.2k|1.0.2l|1.0.2m|1.0.2n|1.0.2o|1.0.2p|1.0.2q|1.0.2r|1.0.2s|1.0.2t|1.0.2u|1.0.2v|1.0.2w|1.0.2x|1.0.2y" } ], "references": [ { - "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3712", - "severities": [], - "reference_id": "CVE-2021-3712", - "reference_type": "" - }, - { - "url": "https://github.com/openssl/openssl/commit/94d23fcff9b2a7a8368dfe52214d5c2569882c11", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-3711", "severities": [], - "reference_id": "", + "reference_id": "CVE-2021-3711", "reference_type": "" }, { - "url": "https://github.com/openssl/openssl/commit/ccb0a11145ee72b042d10593a64eaf9e8a55ec12", + "url": "https://github.com/openssl/openssl/commit/59f5e75f3bced8fc0e130d72a3f582cf7b480b46", "severities": [], "reference_id": "", "reference_type": "" @@ -9580,7 +9580,7 @@ "url": "https://www.openssl.org/news/secadv/20210824.txt", "severities": [ { - "value": "Moderate", + "value": "High", "system": "generic_textual", "scoring_elements": "" } @@ -9593,7 +9593,7 @@ "weaknesses": [] }, { - "unique_content_id": 
"1c5bbe67613cfce3a310b822466ad17e", + "unique_content_id": "2480e0bc015e4765e66637e4b96ea45d8d93d41719e171100ca32011f81d6e80", "summary": "Internally libssl in OpenSSL calls X509_verify_cert() on the client side to verify a certificate supplied by a server. That function may return a negative return value to indicate an internal error (for example out of memory). Such a negative return value is mishandled by OpenSSL and will cause an IO function (such as SSL_connect() or SSL_do_handshake()) to not indicate success and a subsequent call to SSL_get_error() to return the value SSL_ERROR_WANT_RETRY_VERIFY. This return value is only supposed to be returned by OpenSSL if the application has previously called SSL_CTX_set_cert_verify_callback(). Since most applications do not do this the SSL_ERROR_WANT_RETRY_VERIFY return value from SSL_get_error() will be totally unexpected and applications may not behave correctly as a result. The exact behaviour will depend on the application but it could result in crashes, infinite loops or other similar incorrect responses. This issue is made more serious in combination with a separate bug in OpenSSL 3.0 that will cause X509_verify_cert() to indicate an internal error when processing a certificate chain. This will occur where a certificate does not include the Subject Alternative Name extension but where a Certificate Authority has enforced name constraints. This issue can occur even with valid chains. By combining the two issues an attacker could induce incorrect, application dependent behaviour.", "affected_packages": [ { @@ -9639,7 +9639,7 @@ "weaknesses": [] }, { - "unique_content_id": "0039548ab133f97e2138bb298ccc7cae", + "unique_content_id": "80c2054b079c7d69842fe524fdc6abcf1246a37323a9f29ce4f80f4300e8282f", "summary": "There is a carry propagation bug in the MIPS32 and MIPS64 squaring procedure. Many EC algorithms are affected, including some of the TLS 1.3 default curves. 
Impact was not analyzed in detail, because the pre-requisites for attack are considered unlikely and include reusing private keys. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be significant. However, for an attack on TLS to be meaningful, the server would have to share the DH private key among multiple clients, which is no longer an option since CVE-2016-0701. This issue affects OpenSSL versions 1.0.2, 1.1.1 and 3.0.0. It was addressed in the releases of 1.1.1m and 3.0.1 on the 15th of December 2021. For the 1.0.2 release it is addressed in git commit 6fc1aaaf3 that is available to premium support customers only. It will be made available in 1.0.2zc when it is released. The issue only affects OpenSSL on MIPS platforms.", "affected_packages": [ { @@ -9721,7 +9721,7 @@ "weaknesses": [] }, { - "unique_content_id": "caa5eb3135dc715346ce3a32211b024e", + "unique_content_id": "dc0cbb60dc9280799a925c566b952d1c952cf5c3b30d9e3d5726c30a815e49d2", "summary": "The BN_mod_sqrt() function, which computes a modular square root, contains a bug that can cause it to loop forever for non-prime moduli. Internally this function is used when parsing certificates that contain elliptic curve public keys in compressed form or explicit elliptic curve parameters with a base point encoded in compressed form. It is possible to trigger the infinite loop by crafting a certificate that has invalid explicit curve parameters. Since certificate parsing happens prior to verification of the certificate signature, any process that parses an externally supplied certificate may thus be subject to a denial of service attack. 
The infinite loop can also be reached when parsing crafted private keys as they can contain explicit elliptic curve parameters. Thus vulnerable situations include: - TLS clients consuming server certificates - TLS servers consuming client certificates - Hosting providers taking certificates or private keys from customers - Certificate authorities parsing certification requests from subscribers - Anything else which parses ASN.1 elliptic curve parameters Also any other applications that use the BN_mod_sqrt() where the attacker can control the parameter values are vulnerable to this DoS issue. In the OpenSSL 1.0.2 version the public key is not parsed during initial parsing of the certificate which makes it slightly harder to trigger the infinite loop. However any operation which requires the public key from the certificate will trigger the infinite loop. In particular the attacker can use a self-signed certificate to trigger the loop during verification of the certificate signature. This issue affects OpenSSL versions 1.0.2, 1.1.1 and 3.0. 
It was addressed in the releases of 1.1.1n and 3.0.2 on the 15th March 2022.", "affected_packages": [ { diff --git a/vulnerabilities/tests/test_data_migrations.py b/vulnerabilities/tests/test_data_migrations.py index 55bbb71ef..491410fb1 100644 --- a/vulnerabilities/tests/test_data_migrations.py +++ b/vulnerabilities/tests/test_data_migrations.py @@ -948,7 +948,6 @@ def setUpBeforeMigration(self, apps): def test_fix_alpine_purl(self): Package = apps.get_model("vulnerabilities", "Package") package = Package.objects.all() - print(package) assert package.filter(type="alpine").count() == 0 assert package.filter(type="apk").count() == 1 diff --git a/vulnerabilities/tests/test_import_runner.py b/vulnerabilities/tests/test_import_runner.py index d46a34861..3b8080086 100644 --- a/vulnerabilities/tests/test_import_runner.py +++ b/vulnerabilities/tests/test_import_runner.py @@ -177,7 +177,12 @@ def test_advisory_summary_clean_up(): assert "\x00" not in adv.summary -DUMMY_ADVISORY = models.Advisory(summary="dummy", created_by="tests", date_collected=timezone.now()) +DUMMY_ADVISORY = models.Advisory( + unique_content_id="test-unique-content-id", + summary="dummy", + created_by="tests", + date_collected=timezone.now(), +) INFERENCES = [ diff --git a/vulnerabilities/tests/test_improve_runner.py b/vulnerabilities/tests/test_improve_runner.py index 347f87c97..682b45f19 100644 --- a/vulnerabilities/tests/test_improve_runner.py +++ b/vulnerabilities/tests/test_improve_runner.py @@ -12,7 +12,6 @@ import pytest from django.utils import timezone from packageurl import PackageURL -from pytest_django.asserts import assertQuerysetEqual from vulnerabilities.importer import Reference from vulnerabilities.improve_runner import create_valid_vulnerability_reference @@ -21,16 +20,12 @@ from vulnerabilities.improve_runner import get_vulns_for_aliases_and_get_new_aliases from vulnerabilities.improve_runner import process_inferences from vulnerabilities.improver import MAX_CONFIDENCE -from 
vulnerabilities.improver import Improver from vulnerabilities.improver import Inference from vulnerabilities.models import Advisory -from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Alias -from vulnerabilities.models import FixingPackageRelatedVulnerability from vulnerabilities.models import Package from vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilityReference -from vulnerabilities.models import VulnerabilityRelatedReference from vulnerabilities.models import VulnerabilitySeverity @@ -227,7 +222,11 @@ def test_process_inferences_with_empty_aliases(): summary="", ) ], - advisory=Advisory.objects.create(summary="", date_collected=timezone.now()), + advisory=Advisory.objects.create( + unique_content_id="test-unique-content-id", + summary="", + date_collected=timezone.now(), + ), improver_name="NO_ALIASES_IMPROVER", ) diff --git a/vulnerabilities/tests/test_postgres_workaround.py b/vulnerabilities/tests/test_postgres_workaround.py index 38943bd1a..9fe2c66a0 100644 --- a/vulnerabilities/tests/test_postgres_workaround.py +++ b/vulnerabilities/tests/test_postgres_workaround.py @@ -426,6 +426,7 @@ @pytest.mark.django_db def test_postgres_workaround_with_many_references_many_affected_packages_and_long_summary(): adv, _ = Advisory.objects.get_or_create( + unique_content_id="test-unique-content-id", summary=data.summary, affected_packages=[pkg.to_dict() for pkg in data.affected_packages], references=[ref.to_dict() for ref in data.references], diff --git a/vulnerabilities/tests/test_vulnerability_status_improver.py b/vulnerabilities/tests/test_vulnerability_status_improver.py index 084df3a2c..2a67730d4 100644 --- a/vulnerabilities/tests/test_vulnerability_status_improver.py +++ b/vulnerabilities/tests/test_vulnerability_status_improver.py @@ -34,12 +34,14 @@ @pytest.mark.django_db(transaction=True) def test_interesting_advisories(): adv1 = Advisory.objects.create( + 
unique_content_id="test-unique-content-id", created_by=NVDImporterPipeline.pipeline_id, summary="1", date_collected=datetime.now(), ) adv1.aliases.add(*get_or_create_aliases(["CVE-1"])) adv2 = Advisory.objects.create( + unique_content_id="test-unique-content-id", created_by=NVDImporterPipeline.pipeline_id, summary="2", date_collected=datetime.now(), @@ -55,6 +57,7 @@ def test_improver_end_to_end(mock_response): response = os.path.join(TEST_DATA, "CVE-2023-35866.json") mock_response.return_value = response adv = Advisory.objects.create( + unique_content_id="test-unique-content-id", created_by=NVDImporterPipeline.pipeline_id, summary="1", date_collected=datetime.now(), From 04cab61eb48b767963eed2182deab984a62f03be Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 25 Mar 2025 17:10:09 +0530 Subject: [PATCH 074/545] Provide content id when inserting advisory Signed-off-by: Keshav Priyadarshi --- vulnerabilities/import_runner.py | 3 +++ vulnerabilities/pipes/advisory.py | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index 7a81e34da..041ef8872 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -98,13 +98,16 @@ def process_advisories( Return the number of inserted advisories. 
""" from vulnerabilities.pipes.advisory import get_or_create_aliases + from vulnerabilities.utils import compute_content_id count = 0 advisories = [] for data in advisory_datas: + content_id = compute_content_id(advisory_data=data) try: aliases = get_or_create_aliases(aliases=data.aliases) obj, created = Advisory.objects.get_or_create( + unique_content_id=content_id, summary=data.summary, affected_packages=[pkg.to_dict() for pkg in data.affected_packages], references=[ref.to_dict() for ref in data.references], diff --git a/vulnerabilities/pipes/advisory.py b/vulnerabilities/pipes/advisory.py index 3b33438ec..33d74286a 100644 --- a/vulnerabilities/pipes/advisory.py +++ b/vulnerabilities/pipes/advisory.py @@ -36,10 +36,14 @@ def get_or_create_aliases(aliases: List) -> List: def insert_advisory(advisory: AdvisoryData, pipeline_id: str, logger: Callable = None): + from vulnerabilities.utils import compute_content_id + advisory_obj = None aliases = get_or_create_aliases(aliases=advisory.aliases) + content_id = compute_content_id(advisory_data=advisory) try: advisory_obj, _ = Advisory.objects.get_or_create( + unique_content_id=content_id, summary=advisory.summary, affected_packages=[pkg.to_dict() for pkg in advisory.affected_packages], references=[ref.to_dict() for ref in advisory.references], From 69631583875d6393825921864197fc306dfe3a60 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 25 Mar 2025 19:46:35 +0530 Subject: [PATCH 075/545] Ensure reference_id is always a string Signed-off-by: Keshav Priyadarshi --- vulnerabilities/importer.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/vulnerabilities/importer.py b/vulnerabilities/importer.py index b22d29d00..933c19edc 100644 --- a/vulnerabilities/importer.py +++ b/vulnerabilities/importer.py @@ -103,6 +103,8 @@ class Reference: def __post_init__(self): if not self.url: raise TypeError("Reference must have a url") + if self.reference_id and not isinstance(self.reference_id, str): + self.reference_id = 
str(self.reference_id) def __lt__(self, other): if not isinstance(other, Reference): From 02d75b2ae7e6600dcf11e6d627c05dd431a7201e Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 26 Mar 2025 19:23:21 +0530 Subject: [PATCH 076/545] Add test for get_or_create_aliases Signed-off-by: Keshav Priyadarshi --- vulnerabilities/tests/pipes/test_advisory.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/tests/pipes/test_advisory.py b/vulnerabilities/tests/pipes/test_advisory.py index 84b44a5d0..c59c96ef8 100644 --- a/vulnerabilities/tests/pipes/test_advisory.py +++ b/vulnerabilities/tests/pipes/test_advisory.py @@ -18,7 +18,6 @@ from vulnerabilities.importer import Reference from vulnerabilities.pipes.advisory import get_or_create_aliases from vulnerabilities.pipes.advisory import import_advisory -from vulnerabilities.utils import compute_content_id advisory_data1 = AdvisoryData( summary="vulnerability description here", @@ -71,3 +70,13 @@ def test_vulnerability_pipes_importer_import_advisory_different_pipelines(): all_vulnerability_relation_objects = get_all_vulnerability_relationships_objects() import_advisory(advisory=advisory1, pipeline_id="test_importer2_pipeline") assert all_vulnerability_relation_objects == get_all_vulnerability_relationships_objects() + + +@pytest.mark.django_db +def test_vulnerability_pipes_get_or_create_aliases(): + aliases = ["CVE-TEST-123", "CVE-TEST-124"] + result_aliases_qs = get_or_create_aliases(aliases=aliases) + result_aliases = [i.alias for i in result_aliases_qs] + assert 2 == result_aliases_qs.count() + assert "CVE-TEST-123" in result_aliases + assert "CVE-TEST-124" in result_aliases From 16994eba42b739f84822b76f6b141b25f140245d Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 26 Mar 2025 19:31:40 +0530 Subject: [PATCH 077/545] Use iterator in vulnerability_status improver Signed-off-by: Keshav Priyadarshi --- vulnerabilities/improvers/vulnerability_status.py | 5 
+++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/vulnerabilities/improvers/vulnerability_status.py b/vulnerabilities/improvers/vulnerability_status.py index e9661344e..10a640c55 100644 --- a/vulnerabilities/improvers/vulnerability_status.py +++ b/vulnerabilities/improvers/vulnerability_status.py @@ -10,7 +10,6 @@ from typing import Iterable from urllib.parse import urljoin -from django.db.models import Q from django.db.models.query import QuerySet from vulnerabilities.importer import AdvisoryData @@ -37,7 +36,9 @@ class VulnerabilityStatusImprover(Improver): @property def interesting_advisories(self) -> QuerySet: - return Advisory.objects.filter(Q(created_by=NVDImporterPipeline.pipeline_id)).paginated() + return Advisory.objects.filter(created_by=NVDImporterPipeline.pipeline_id).iterator( + chunk_size=5000 + ) def get_inferences(self, advisory_data: AdvisoryData) -> Iterable[Inference]: """ From b3d5fc56557840269c6b2084a1f3c9b2178882af Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 26 Mar 2025 19:33:55 +0530 Subject: [PATCH 078/545] Use unique_content_id for get_or_create Advisory - Since unique_content_id is a proxy for other fields using it is enough and sufficient. 
Signed-off-by: Keshav Priyadarshi --- vulnerabilities/import_runner.py | 12 ++++++------ vulnerabilities/pipes/advisory.py | 13 +++++++------ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index 041ef8872..796a03ef3 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -108,16 +108,16 @@ def process_advisories( aliases = get_or_create_aliases(aliases=data.aliases) obj, created = Advisory.objects.get_or_create( unique_content_id=content_id, - summary=data.summary, - affected_packages=[pkg.to_dict() for pkg in data.affected_packages], - references=[ref.to_dict() for ref in data.references], - date_published=data.date_published, - weaknesses=data.weaknesses, + url=data.url, defaults={ + "summary": data.summary, + "affected_packages": [pkg.to_dict() for pkg in data.affected_packages], + "references": [ref.to_dict() for ref in data.references], + "date_published": data.date_published, + "weaknesses": data.weaknesses, "created_by": importer_name, "date_collected": datetime.datetime.now(tz=datetime.timezone.utc), }, - url=data.url, ) obj.aliases.add(*aliases) if not obj.date_imported: diff --git a/vulnerabilities/pipes/advisory.py b/vulnerabilities/pipes/advisory.py index 33d74286a..3d98392c9 100644 --- a/vulnerabilities/pipes/advisory.py +++ b/vulnerabilities/pipes/advisory.py @@ -15,6 +15,7 @@ from typing import List from django.db import transaction +from django.db.models.query import QuerySet from vulnerabilities.importer import AdvisoryData from vulnerabilities.improver import MAX_CONFIDENCE @@ -29,7 +30,7 @@ from vulnerabilities.models import Weakness -def get_or_create_aliases(aliases: List) -> List: +def get_or_create_aliases(aliases: List) -> QuerySet: for alias in aliases: Alias.objects.get_or_create(alias=alias) return Alias.objects.filter(alias__in=aliases) @@ -44,13 +45,13 @@ def insert_advisory(advisory: AdvisoryData, pipeline_id: str, 
logger: Callable = try: advisory_obj, _ = Advisory.objects.get_or_create( unique_content_id=content_id, - summary=advisory.summary, - affected_packages=[pkg.to_dict() for pkg in advisory.affected_packages], - references=[ref.to_dict() for ref in advisory.references], - date_published=advisory.date_published, - weaknesses=advisory.weaknesses, url=advisory.url, defaults={ + "summary": advisory.summary, + "affected_packages": [pkg.to_dict() for pkg in advisory.affected_packages], + "references": [ref.to_dict() for ref in advisory.references], + "date_published": advisory.date_published, + "weaknesses": advisory.weaknesses, "created_by": pipeline_id, "date_collected": datetime.now(timezone.utc), }, From eeee06ed81f9e7aaa3c93e4aef6598e8f7cd9a43 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 26 Mar 2025 19:37:25 +0530 Subject: [PATCH 079/545] Optimize alias migration using AdvisoryRelatedAlias - Use AdvisoryRelatedAlias to manage advisory alias relation Signed-off-by: Keshav Priyadarshi --- .../0090_migrate_advisory_aliases.py | 136 +++++++++++++++--- vulnerabilities/models.py | 16 +++ 2 files changed, 129 insertions(+), 23 deletions(-) diff --git a/vulnerabilities/migrations/0090_migrate_advisory_aliases.py b/vulnerabilities/migrations/0090_migrate_advisory_aliases.py index 2734d2bec..0710d296b 100644 --- a/vulnerabilities/migrations/0090_migrate_advisory_aliases.py +++ b/vulnerabilities/migrations/0090_migrate_advisory_aliases.py @@ -7,16 +7,32 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +from timeit import default_timer as timer + +import django.db.models.deletion from aboutcode.pipeline import LoopProgress +from aboutcode.pipeline import humanize_time from django.db import migrations from django.db import models -import django.db.models.deletion """ -Model and data migration for converting the Advisory aliases -JSON field to a concrete M2M Advisory Alias relationship. 
+Model and data migration to convert Advisory.aliases +JSON field to a concrete M2M Advisory-Alias relationship. + +To achieve this following steps are executed in chronological order. + - Create AdvisoryRelatedAlias model for Advisory-Alias M2M relationship. + - Make unique_content_id non-nullable and a required field. + - Make Alias.vulnerability field nullable, as vulnerability may not + exist for a corresponding alias during initial data collection. + - Rename existing Advisory.aliases JSON field to old_aliases. + - Create a new Advisory.aliases M2M relation through AdvisoryRelatedAlias model. + - Run a data migration to populate new M2M Advisory.aliases relation using + Advisory.old_aliases data. + - Delete Advisory.old_aliases field. + """ + def bulk_update(model, items, fields, logger): item_count = 0 if items: @@ -25,7 +41,17 @@ def bulk_update(model, items, fields, logger): item_count += len(items) except Exception as e: logger(f"Error updating Advisory: {e}") - items.clear() + return item_count + + +def bulk_create(model, items, logger): + item_count = 0 + if items: + try: + model.objects.bulk_create(objs=items) + item_count += len(items) + except Exception as e: + logger(f"Error creating AdvisoryRelatedAlias: {e}") return item_count @@ -36,56 +62,122 @@ class Migration(migrations.Migration): ] def populate_new_advisory_aliases_field(apps, schema_editor): + """Populate the new Advisory.aliases relation using old_aliases JSON data.""" + migration_start_time = timer() Advisory = apps.get_model("vulnerabilities", "Advisory") Alias = apps.get_model("vulnerabilities", "Alias") + AdvisoryRelatedAlias = apps.get_model("vulnerabilities", "AdvisoryRelatedAlias") advisories = Advisory.objects.all() + aliases = {i.alias: i for i in Alias.objects.all()} - chunk_size = 10000 + chunk_size = 5000 advisories_count = advisories.count() - print(f"\nPopulate new advisory aliases relationship.") + batch_size = 5000 + relation_to_create = [] + advisory_alias_relation_count = 
0 progress = LoopProgress( total_iterations=advisories_count, logger=print, progress_step=1, ) + print(f"\nPopulate new advisory aliases relationship.") for advisory in progress.iter(advisories.iterator(chunk_size=chunk_size)): - aliases = Alias.objects.filter(alias__in=advisory.old_aliases) - advisory.aliases.set(aliases) + advisory_alias_relations = [ + AdvisoryRelatedAlias(advisory=advisory, alias=aliases[alias]) + for alias in advisory.old_aliases + if alias in aliases + ] + relation_to_create.extend(advisory_alias_relations) + + if len(relation_to_create) > batch_size: + advisory_alias_relation_count += bulk_create( + model=AdvisoryRelatedAlias, + items=relation_to_create, + logger=print, + ) + relation_to_create.clear() + + advisory_alias_relation_count += bulk_create( + model=AdvisoryRelatedAlias, + items=relation_to_create, + logger=print, + ) + migration_run_time = timer() - migration_start_time + print( + f"\nSuccessfully created {advisory_alias_relation_count} advisory-alias relationship." 
+ ) + print(f"\nData Migration: completed in {humanize_time(migration_run_time)}") def reverse_populate_new_advisory_aliases_field(apps, schema_editor): + """Use the Advisory.aliases relation to populate old_aliases JSON field.""" + migration_start_time = timer() Advisory = apps.get_model("vulnerabilities", "Advisory") - advisories = Advisory.objects.all() + advisories = Advisory.objects.prefetch_related("aliases").all() updated_advisory_count = 0 - batch_size = 10000 - chunk_size = 10000 - updated_advisory = [] + batch_size = 5000 + chunk_size = 5000 + advisory_to_update = [] progress = LoopProgress( total_iterations=advisories.count(), logger=print, progress_step=1, ) + print(f"\nReverse alias migration to M2M relation.") for advisory in progress.iter(advisories.iterator(chunk_size=chunk_size)): aliases = advisory.aliases.all() advisory.old_aliases = [alias.alias for alias in aliases] - updated_advisory.append(advisory) + advisory_to_update.append(advisory) - if len(updated_advisory) > batch_size: + if len(advisory_to_update) > batch_size: updated_advisory_count += bulk_update( model=Advisory, - items=updated_advisory, + items=advisory_to_update, fields=["old_aliases"], logger=print, ) + advisory_to_update.clear() updated_advisory_count += bulk_update( model=Advisory, - items=updated_advisory, + items=advisory_to_update, fields=["old_aliases"], logger=print, ) + migration_run_time = timer() - migration_start_time + print( + f"\nSuccessfully reversed the alias relationship for {updated_advisory_count} advisories." 
+ ) + print(f"\nData Migration: completed in {humanize_time(migration_run_time)}") + operations = [ + migrations.CreateModel( + name="AdvisoryRelatedAlias", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "advisory", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="vulnerabilities.advisory" + ), + ), + ( + "alias", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="vulnerabilities.alias" + ), + ), + ], + options={ + "unique_together": {("advisory", "alias")}, + }, + ), migrations.AlterField( model_name="advisory", name="unique_content_id", @@ -96,8 +188,6 @@ def reverse_populate_new_advisory_aliases_field(apps, schema_editor): null=False, ), ), - - # Make vulnerability relation optional migrations.AlterField( model_name="alias", name="vulnerability", @@ -109,8 +199,6 @@ def reverse_populate_new_advisory_aliases_field(apps, schema_editor): to="vulnerabilities.vulnerability", ), ), - - # Rename aliases field to old_aliases migrations.AlterModelOptions( name="advisory", options={"ordering": ["date_published", "unique_content_id"]}, @@ -127,14 +215,16 @@ def reverse_populate_new_advisory_aliases_field(apps, schema_editor): migrations.AddField( model_name="advisory", name="aliases", - field=models.ManyToManyField(related_name="advisories", to="vulnerabilities.alias"), + field=models.ManyToManyField( + related_name="advisories", + through="vulnerabilities.AdvisoryRelatedAlias", + to="vulnerabilities.alias", + ), ), - # Populate the new M2M aliases relation migrations.RunPython( code=populate_new_advisory_aliases_field, reverse_code=reverse_populate_new_advisory_aliases_field, ), - # Delete JSON aliases field migrations.RemoveField( model_name="advisory", name="old_aliases", diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 4085c5171..dba205500 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ 
-1325,6 +1325,7 @@ class Advisory(models.Model): ) aliases = models.ManyToManyField( Alias, + through="AdvisoryRelatedAlias", related_name="advisories", ) summary = models.TextField( @@ -1386,6 +1387,21 @@ def to_advisory_data(self) -> "AdvisoryData": ) +class AdvisoryRelatedAlias(models.Model): + advisory = models.ForeignKey( + Advisory, + on_delete=models.CASCADE, + ) + + alias = models.ForeignKey( + Alias, + on_delete=models.CASCADE, + ) + + class Meta: + unique_together = ("advisory", "alias") + + UserModel = get_user_model() From 5529efe4e345645680718cc5616c124419a94fd3 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 26 Mar 2025 21:27:12 +0530 Subject: [PATCH 080/545] Move the package search box to the top Signed-off-by: Keshav Priyadarshi --- vulnerabilities/templates/index.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vulnerabilities/templates/index.html b/vulnerabilities/templates/index.html index cdc9212ed..78effa82d 100644 --- a/vulnerabilities/templates/index.html +++ b/vulnerabilities/templates/index.html @@ -10,12 +10,12 @@
    - {% include "vulnerability_search_box.html" %} + {% include "package_search_box.html" %}
    - {% include "package_search_box.html" %} + {% include "vulnerability_search_box.html" %}
    From 3cc02396a2d8a13ec76d95e8a7de07108872d1e5 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 26 Mar 2025 20:53:02 +0530 Subject: [PATCH 081/545] Add CHANGELOG for v36.0.0 Signed-off-by: Keshav Priyadarshi --- CHANGELOG.rst | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 7f6debf44..5f32b0c82 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,6 +2,27 @@ Release notes ============= +Version v36.0.0 +--------------------- + +- Add indexes for models https://github.com/aboutcode-org/vulnerablecode/pull/1701 +- Add fixed by package in V2 API https://github.com/aboutcode-org/vulnerablecode/pull/1706 +- Add tests for num queries for views https://github.com/aboutcode-org/vulnerablecode/pull/1730 +- Add postgresql conf in docker-compose https://github.com/aboutcode-org/vulnerablecode/pull/1733 +- Add default postgresql.conf for local docker build https://github.com/aboutcode-org/vulnerablecode/pull/1735 +- Add models for CodeFix https://github.com/aboutcode-org/vulnerablecode/pull/1704 +- Migrate Alpine Linux importer to aboutcode pipeline https://github.com/aboutcode-org/vulnerablecode/pull/1737 +- VCIO-next: Allow CVSS3.1 Severities in NVD https://github.com/aboutcode-org/vulnerablecode/pull/1738 +- Add Pipeline to add missing CVSSV3.1 scores https://github.com/aboutcode-org/vulnerablecode/pull/1740 +- Add description and reference to the latest release on the homepage https://github.com/aboutcode-org/vulnerablecode/pull/1743 +- Use proper apk package type for Alpine https://github.com/aboutcode-org/vulnerablecode/pull/1739 +- Optimize vulnerabilities view https://github.com/aboutcode-org/vulnerablecode/pull/1728 +- Add CWE support in multiple importers https://github.com/aboutcode-org/vulnerablecode/pull/1526 +- 
Fast content ID migration https://github.com/aboutcode-org/vulnerablecode/pull/1795 +- Add captcha for user signup https://github.com/aboutcode-org/vulnerablecode/pull/1822 +- Move the package search box to the top by @keshav-space in https://github.com/aboutcode-org/vulnerablecode/pull/1832 + + Version v35.1.0 --------------------- From 03bfa643ff733347255af103ca142bed03b304d2 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 26 Mar 2025 20:53:39 +0530 Subject: [PATCH 082/545] Bump version for v36.0.0 release Signed-off-by: Keshav Priyadarshi --- setup.cfg | 2 +- vulnerablecode/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index 8c6dc03c0..6daed299a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = vulnerablecode -version = 35.1.0 +version = 36.0.0 license = Apache-2.0 AND CC-BY-SA-4.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index ee339e883..3210b8cd9 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -10,7 +10,7 @@ import os import sys -__version__ = "35.1.0" +__version__ = "36.0.0" def command_line(): From 1f394fa4a5b120a542f1db3a83b5abe801e7e0c5 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Tue, 1 Apr 2025 11:51:32 +0530 Subject: [PATCH 083/545] Populate missing vulnerabilities summary (#1767) * Address review comments Signed-off-by: Tushar Goel * Change models Signed-off-by: Tushar Goel * Change models Signed-off-by: Tushar Goel * Adhere to new models for summary pipeline Signed-off-by: Tushar Goel * Do bulk update Signed-off-by: Tushar Goel --------- Signed-off-by: Tushar Goel --- vulnerabilities/improvers/__init__.py | 2 + ...populate_vulnerability_summary_pipeline.py | 71 ++++++++ 
...populate_vulnerability_summary_pipeline.py | 160 ++++++++++++++++++ 3 files changed, 233 insertions(+) create mode 100644 vulnerabilities/pipelines/populate_vulnerability_summary_pipeline.py create mode 100644 vulnerabilities/tests/pipelines/test_populate_vulnerability_summary_pipeline.py diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index 37143d125..9e36ce5f0 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -18,6 +18,7 @@ from vulnerabilities.pipelines import enhance_with_kev from vulnerabilities.pipelines import enhance_with_metasploit from vulnerabilities.pipelines import flag_ghost_packages +from vulnerabilities.pipelines import populate_vulnerability_summary_pipeline from vulnerabilities.pipelines import remove_duplicate_advisories IMPROVERS_REGISTRY = [ @@ -47,6 +48,7 @@ collect_commits.CollectFixCommitsPipeline, add_cvss31_to_CVEs.CVEAdvisoryMappingPipeline, remove_duplicate_advisories.RemoveDuplicateAdvisoriesPipeline, + populate_vulnerability_summary_pipeline.PopulateVulnerabilitySummariesPipeline, ] IMPROVERS_REGISTRY = { diff --git a/vulnerabilities/pipelines/populate_vulnerability_summary_pipeline.py b/vulnerabilities/pipelines/populate_vulnerability_summary_pipeline.py new file mode 100644 index 000000000..fb458efd5 --- /dev/null +++ b/vulnerabilities/pipelines/populate_vulnerability_summary_pipeline.py @@ -0,0 +1,71 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
+# + +import logging + +from aboutcode.pipeline import LoopProgress +from django.db.models import Q + +from vulnerabilities.models import Advisory +from vulnerabilities.models import Vulnerability +from vulnerabilities.pipelines import VulnerableCodePipeline + + +class PopulateVulnerabilitySummariesPipeline(VulnerableCodePipeline): + """Pipeline to populate missing vulnerability summaries from advisories.""" + + pipeline_id = "populate_vulnerability_summaries" + + @classmethod + def steps(cls): + return (cls.populate_missing_summaries,) + + def populate_missing_summaries(self): + """Find vulnerabilities with missing summaries and populate them using advisories with the same aliases.""" + vulnerabilities_qs = Vulnerability.objects.filter(summary="") + self.log( + f"Processing {vulnerabilities_qs.count()} vulnerabilities without summaries", + level=logging.INFO, + ) + + progress = LoopProgress(total_iterations=vulnerabilities_qs.count(), logger=self.log) + + vulnerabilities_to_be_updated = [] + + for vulnerability in progress.iter(vulnerabilities_qs.iterator()): + cve_alias = vulnerability.aliases.filter(alias__startswith="CVE-").first() + + if not cve_alias: + self.log( + f"Vulnerability {vulnerability.vulnerability_id} has no CVE alias", + level=logging.DEBUG, + ) + continue + + matching_advisories = Advisory.objects.filter( + aliases=cve_alias, created_by="nvd_importer" + ).exclude(summary="") + + if matching_advisories.exists(): + best_advisory = matching_advisories.order_by("-date_collected").first() + # Note: we filtered above to only get non-empty summaries + vulnerability.summary = best_advisory.summary + vulnerabilities_to_be_updated.append(vulnerability) + self.log( + f"Updated summary for vulnerability {vulnerability.vulnerability_id}", + level=logging.INFO, + ) + else: + self.log(f"No advisory found for alias {cve_alias}", level=logging.DEBUG) + Vulnerability.objects.bulk_update(vulnerabilities_to_be_updated, ["summary"]) + self.log( + f"Successfully 
populated {len(vulnerabilities_to_be_updated)} vulnerabilities with summary", + level=logging.INFO, + ) + self.log("Pipeline completed", level=logging.INFO) diff --git a/vulnerabilities/tests/pipelines/test_populate_vulnerability_summary_pipeline.py b/vulnerabilities/tests/pipelines/test_populate_vulnerability_summary_pipeline.py new file mode 100644 index 000000000..d8f3ad944 --- /dev/null +++ b/vulnerabilities/tests/pipelines/test_populate_vulnerability_summary_pipeline.py @@ -0,0 +1,160 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +import datetime +from pathlib import Path + +import pytz +from django.test import TestCase + +from vulnerabilities.models import Advisory +from vulnerabilities.models import Alias +from vulnerabilities.models import Vulnerability +from vulnerabilities.pipelines.populate_vulnerability_summary_pipeline import ( + PopulateVulnerabilitySummariesPipeline, +) + + +class PopulateVulnerabilitySummariesPipelineTest(TestCase): + def setUp(self): + self.data = Path(__file__).parent.parent / "test_data" + + def test_populate_missing_summaries_from_nvd(self): + """ + Test that vulnerabilities without summaries get them from NVD advisories. 
+ """ + + # Create a vulnerability without a summary + vulnerability = Vulnerability.objects.create( + vulnerability_id="VCID-1234", + summary="", + ) + alias = Alias.objects.create(alias="CVE-2024-1234", vulnerability=vulnerability) + + # Create an NVD advisory with a summary + adv = Advisory.objects.create( + summary="Test vulnerability summary", + created_by="nvd_importer", + date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + unique_content_id="Test", + ) + adv.aliases.add(alias) + + # Run the pipeline + pipeline = PopulateVulnerabilitySummariesPipeline() + pipeline.populate_missing_summaries() + + # Check that the vulnerability now has a summary + vulnerability.refresh_from_db() + self.assertEqual(vulnerability.summary, "Test vulnerability summary") + + def test_no_matching_advisory(self): + """ + Test handling of vulnerabilities that have no matching NVD advisory. + """ + # Create a vulnerability without a summary + vulnerability = Vulnerability.objects.create( + vulnerability_id="VCID-1234", + summary="", + ) + Alias.objects.create(alias="CVE-2024-1234", vulnerability=vulnerability) + + # Run the pipeline + pipeline = PopulateVulnerabilitySummariesPipeline() + pipeline.populate_missing_summaries() + + # Check that the vulnerability still has no summary + vulnerability.refresh_from_db() + self.assertEqual(vulnerability.summary, "") + + def test_vulnerability_without_alias(self): + """ + Test handling of vulnerabilities that have no aliases. 
+ """ + + # Create a vulnerability without a summary or alias + vulnerability = Vulnerability.objects.create( + vulnerability_id="VCID-1234", + summary="", + ) + + # Run the pipeline + pipeline = PopulateVulnerabilitySummariesPipeline() + pipeline.populate_missing_summaries() + + # Check that the vulnerability still has no summary + vulnerability.refresh_from_db() + self.assertEqual(vulnerability.summary, "") + + def test_non_nvd_advisory_ignored(self): + """ + Test that advisories from sources other than NVD are ignored. + """ + + # Create a vulnerability without a summary + vulnerability = Vulnerability.objects.create( + vulnerability_id="VCID-1234", + summary="", + ) + alias = Alias.objects.create(alias="CVE-2024-1234", vulnerability=vulnerability) + + # Create a non-NVD advisory with a summary + adv = Advisory.objects.create( + summary="Test vulnerability summary", + created_by="other_importer", + date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + unique_content_id="Test", + ) + + adv.aliases.add(alias) + + # Run the pipeline + pipeline = PopulateVulnerabilitySummariesPipeline() + pipeline.populate_missing_summaries() + + # Check that the vulnerability still has no summary + vulnerability.refresh_from_db() + self.assertEqual(vulnerability.summary, "") + + def test_multiple_matching_advisories(self): + """ + Test that the most recent matching advisory is used when there are multiple. 
+ """ + vulnerability = Vulnerability.objects.create( + vulnerability_id="VCID-1234", + summary="", + ) + alias = Alias.objects.create(alias="CVE-2024-1234", vulnerability=vulnerability) + + # Create two NVD advisories with the same alias + adv1 = Advisory.objects.create( + summary="First matching advisory", + created_by="nvd_importer", + date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), + unique_content_id="Test", + ) + + adv1.aliases.add(alias) + + adv2 = Advisory.objects.create( + summary="Second matching advisory", + created_by="nvd_importer", + date_collected=datetime.datetime(2024, 1, 2, tzinfo=pytz.UTC), + unique_content_id="Test-1", + ) + + adv2.aliases.add(alias) + + # Run the pipeline + pipeline = PopulateVulnerabilitySummariesPipeline() + pipeline.populate_missing_summaries() + + # Check that the vulnerability now has the most recent summary + vulnerability.refresh_from_db() + self.assertEqual(vulnerability.summary, "Second matching advisory") From 83df390b727db39cfdf099e5658bdfb7b497a1d9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 4 Apr 2025 11:43:48 +0530 Subject: [PATCH 084/545] Bump django from 4.2.17 to 4.2.20 (#1797) Bumps [django](https://github.com/django/django) from 4.2.17 to 4.2.20. - [Commits](https://github.com/django/django/compare/4.2.17...4.2.20) --- updated-dependencies: - dependency-name: django dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Tushar Goel <34160672+TG1999@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index b0d7a4547..e9c0b16aa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -27,7 +27,7 @@ dateparser==1.1.1 decorator==5.1.1 defusedxml==0.7.1 distro==1.7.0 -Django==4.2.17 +Django==4.2.20 django-crispy-forms==2.3 django-environ==0.11.2 django-filter==24.3 From ecdb4ad0367fefa4ca5a9d97eea481003bb70201 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Fri, 4 Apr 2025 11:56:09 +0530 Subject: [PATCH 085/545] Stop github OSV importer crashes (#1853) * Stop github OSV importer crashes Signed-off-by: Tushar Goel * Fix formatting issues Signed-off-by: Tushar Goel --------- Signed-off-by: Tushar Goel --- vulnerabilities/importers/osv.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/importers/osv.py b/vulnerabilities/importers/osv.py index 90f4200e8..19867cda5 100644 --- a/vulnerabilities/importers/osv.py +++ b/vulnerabilities/importers/osv.py @@ -220,7 +220,14 @@ def get_affected_purl(affected_pkg, raw_id): f"No PackageURL possible: {purl!r} for affected_pkg {affected_pkg} for OSV id: {raw_id}" ) return - return PackageURL.from_string(str(purl)) + try: + package_url = PackageURL.from_string(str(purl)) + return package_url + except: + logger.error( + f"Invalid PackageURL: {purl!r} for affected_pkg {affected_pkg} for OSV id: {raw_id}" + ) + return None def get_affected_version_range(affected_pkg, raw_id, supported_ecosystem): From 51a1c0b967887f5ad5d3f8af6e9b74740d64dac1 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Fri, 4 Apr 2025 12:05:23 +0530 Subject: [PATCH 086/545] Reorder 
importing order (#1854) Signed-off-by: Tushar Goel --- vulnerabilities/importers/__init__.py | 34 +++++++++++++-------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/vulnerabilities/importers/__init__.py b/vulnerabilities/importers/__init__.py index 3f429f669..f0d9532ab 100644 --- a/vulnerabilities/importers/__init__.py +++ b/vulnerabilities/importers/__init__.py @@ -44,10 +44,25 @@ from vulnerabilities.pipelines import pysec_importer IMPORTERS_REGISTRY = [ + nvd_importer.NVDImporterPipeline, + github_importer.GitHubAPIImporterPipeline, + gitlab_importer.GitLabImporterPipeline, + github_osv.GithubOSVImporter, + pypa_importer.PyPaImporterPipeline, + npm_importer.NpmImporterPipeline, + nginx_importer.NginxImporterPipeline, + pysec_importer.PyPIImporterPipeline, + apache_tomcat.ApacheTomcatImporter, + postgresql.PostgreSQLImporter, + debian.DebianImporter, + curl.CurlImporter, + epss.EPSSImporter, + vulnrichment.VulnrichImporter, + alpine_linux_importer.AlpineLinuxImporterPipeline, + ruby.RubyImporter, + apache_kafka.ApacheKafkaImporter, openssl.OpensslImporter, redhat.RedhatImporter, - debian.DebianImporter, - postgresql.PostgreSQLImporter, archlinux.ArchlinuxImporter, ubuntu.UbuntuImporter, debian_oval.DebianOvalImporter, @@ -59,25 +74,10 @@ project_kb_msr2019.ProjectKBMSRImporter, suse_scores.SUSESeverityScoreImporter, elixir_security.ElixirSecurityImporter, - apache_tomcat.ApacheTomcatImporter, xen.XenImporter, ubuntu_usn.UbuntuUSNImporter, fireeye.FireyeImporter, - apache_kafka.ApacheKafkaImporter, oss_fuzz.OSSFuzzImporter, - ruby.RubyImporter, - github_osv.GithubOSVImporter, - curl.CurlImporter, - epss.EPSSImporter, - vulnrichment.VulnrichImporter, - pypa_importer.PyPaImporterPipeline, - npm_importer.NpmImporterPipeline, - nginx_importer.NginxImporterPipeline, - gitlab_importer.GitLabImporterPipeline, - github_importer.GitHubAPIImporterPipeline, - nvd_importer.NVDImporterPipeline, - pysec_importer.PyPIImporterPipeline, - 
alpine_linux_importer.AlpineLinuxImporterPipeline, ] IMPORTERS_REGISTRY = { From 3fb3e7571dd9f0483d8ca17cd7d6c8524c676509 Mon Sep 17 00:00:00 2001 From: Tushar Goel <34160672+TG1999@users.noreply.github.com> Date: Mon, 7 Apr 2025 14:07:03 +0530 Subject: [PATCH 087/545] Fix alpine linux importer (#1861) Signed-off-by: Tushar Goel --- .../pipelines/alpine_linux_importer.py | 26 ++++++++++++------- .../test_alpine_linux_importer_pipeline.py | 4 +-- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/vulnerabilities/pipelines/alpine_linux_importer.py b/vulnerabilities/pipelines/alpine_linux_importer.py index 28736e507..5657ee4d2 100644 --- a/vulnerabilities/pipelines/alpine_linux_importer.py +++ b/vulnerabilities/pipelines/alpine_linux_importer.py @@ -195,7 +195,8 @@ def load_advisories( level=logging.DEBUG, ) continue - + # fixed_vulns is a list of strings and each string is a space-separated + # list of aliases and CVES for vuln_ids in fixed_vulns: if not isinstance(vuln_ids, str): if logger: @@ -204,15 +205,16 @@ def load_advisories( level=logging.DEBUG, ) continue - vuln_ids = vuln_ids.split() - aliases = [] - vuln_id = vuln_ids[0] - # check for valid vuln ID, if there is valid vuln ID then iterate over - # the remaining elements of the list else iterate over the whole list - # and also check if the initial element is a reference or not - if is_cve(vuln_id): - aliases = [vuln_id] - vuln_ids = vuln_ids[1:] + vuln_ids = vuln_ids.strip().split() + if not vuln_ids: + if logger: + logger( + f"{vuln_ids!r} is empty", + level=logging.DEBUG, + ) + continue + aliases = vuln_ids + references = [] for reference_id in vuln_ids: @@ -225,6 +227,10 @@ def load_advisories( elif reference_id.startswith("wnpa-sec"): references.append(WireSharkReference.from_id(wnpa_sec_id=reference_id)) + elif not reference_id.startswith("CVE"): + if logger: + logger(f"Unknown reference id {reference_id!r}", level=logging.DEBUG) + qualifiers = { "distroversion": 
distroversion, "reponame": reponame, diff --git a/vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py b/vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py index 49182b287..2106b58ff 100644 --- a/vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py +++ b/vulnerabilities/tests/pipelines/test_alpine_linux_importer_pipeline.py @@ -31,7 +31,7 @@ def test_process_record(): logger = TestLogger() expected_advisories = [ AdvisoryData( - aliases=[], + aliases=["XSA-248"], summary="", affected_packages=[ AffectedPackage( @@ -138,7 +138,7 @@ def test_process_record(): url="https://secdb.alpinelinux.org/v3.11/", ), AdvisoryData( - aliases=["CVE-2018-7540"], + aliases=["CVE-2018-7540", "XSA-252"], summary="", affected_packages=[ AffectedPackage( From c3cdb9b22fb73eac81a09e5562909c81ce56839e Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 4 Apr 2025 21:58:30 +0530 Subject: [PATCH 088/545] Make advisory content id unique - make the url field non-empty and non-nullable Signed-off-by: Keshav Priyadarshi --- ...alter_advisory_unique_together_and_more.py | 31 +++++++++++++++++++ vulnerabilities/models.py | 5 +-- 2 files changed, 34 insertions(+), 2 deletions(-) create mode 100644 vulnerabilities/migrations/0091_alter_advisory_unique_together_and_more.py diff --git a/vulnerabilities/migrations/0091_alter_advisory_unique_together_and_more.py b/vulnerabilities/migrations/0091_alter_advisory_unique_together_and_more.py new file mode 100644 index 000000000..5a1029567 --- /dev/null +++ b/vulnerabilities/migrations/0091_alter_advisory_unique_together_and_more.py @@ -0,0 +1,31 @@ +# Generated by Django 4.2.17 on 2025-04-04 16:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0090_migrate_advisory_aliases"), + ] + + operations = [ + migrations.AlterUniqueTogether( + name="advisory", + unique_together=set(), + ), + migrations.AlterField( + 
model_name="advisory", + name="unique_content_id", + field=models.CharField( + help_text="A 64 character unique identifier for the content of the advisory since we use sha256 as hex", + max_length=64, + unique=True, + ), + ), + migrations.AlterField( + model_name="advisory", + name="url", + field=models.URLField(help_text="Link to the advisory on the upstream website"), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index dba205500..fc317b3ce 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1321,6 +1321,7 @@ class Advisory(models.Model): max_length=64, blank=False, null=False, + unique=True, help_text="A 64 character unique identifier for the content of the advisory since we use sha256 as hex", ) aliases = models.ManyToManyField( @@ -1355,14 +1356,14 @@ class Advisory(models.Model): "vulnerabilities.pipeline.nginx_importer.NginxImporterPipeline", ) url = models.URLField( - blank=True, + blank=False, + null=False, help_text="Link to the advisory on the upstream website", ) objects = AdvisoryQuerySet.as_manager() class Meta: - unique_together = ["unique_content_id", "date_published", "url"] ordering = ["date_published", "unique_content_id"] def save(self, *args, **kwargs): From 7350f599b2208807665e90a4492a8d4419677aa4 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 7 Apr 2025 17:15:52 +0530 Subject: [PATCH 089/545] Use content id to insert new advisory Signed-off-by: Keshav Priyadarshi --- vulnerabilities/import_runner.py | 24 +++++++++++-------- vulnerabilities/pipes/advisory.py | 38 ++++++++++++++++++------------- 2 files changed, 37 insertions(+), 25 deletions(-) diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index 796a03ef3..10e1ab4b1 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -104,24 +104,30 @@ def process_advisories( advisories = [] for data in advisory_datas: content_id = compute_content_id(advisory_data=data) + 
advisory = { + "summary": data.summary, + "affected_packages": [pkg.to_dict() for pkg in data.affected_packages], + "references": [ref.to_dict() for ref in data.references], + "date_published": data.date_published, + "weaknesses": data.weaknesses, + "created_by": importer_name, + "date_collected": datetime.datetime.now(tz=datetime.timezone.utc), + } try: aliases = get_or_create_aliases(aliases=data.aliases) obj, created = Advisory.objects.get_or_create( unique_content_id=content_id, url=data.url, - defaults={ - "summary": data.summary, - "affected_packages": [pkg.to_dict() for pkg in data.affected_packages], - "references": [ref.to_dict() for ref in data.references], - "date_published": data.date_published, - "weaknesses": data.weaknesses, - "created_by": importer_name, - "date_collected": datetime.datetime.now(tz=datetime.timezone.utc), - }, + defaults=advisory, ) obj.aliases.add(*aliases) if not obj.date_imported: advisories.append(obj) + except Advisory.MultipleObjectsReturned as mo: + logger.error( + f"Multiple Advisories returned: unique_content_id: {content_id}, url: {data.url}, advisory: {advisory!r}" + ) + raise except Exception as e: logger.error( f"Error while processing {data!r} with aliases {data.aliases!r}: {e!r} \n {traceback_format_exc()}" diff --git a/vulnerabilities/pipes/advisory.py b/vulnerabilities/pipes/advisory.py index 3d98392c9..dd21bc88c 100644 --- a/vulnerabilities/pipes/advisory.py +++ b/vulnerabilities/pipes/advisory.py @@ -43,20 +43,27 @@ def insert_advisory(advisory: AdvisoryData, pipeline_id: str, logger: Callable = aliases = get_or_create_aliases(aliases=advisory.aliases) content_id = compute_content_id(advisory_data=advisory) try: + default_data = { + "summary": advisory.summary, + "affected_packages": [pkg.to_dict() for pkg in advisory.affected_packages], + "references": [ref.to_dict() for ref in advisory.references], + "date_published": advisory.date_published, + "weaknesses": advisory.weaknesses, + "created_by": pipeline_id, + 
"date_collected": datetime.now(timezone.utc), + } + advisory_obj, _ = Advisory.objects.get_or_create( unique_content_id=content_id, url=advisory.url, - defaults={ - "summary": advisory.summary, - "affected_packages": [pkg.to_dict() for pkg in advisory.affected_packages], - "references": [ref.to_dict() for ref in advisory.references], - "date_published": advisory.date_published, - "weaknesses": advisory.weaknesses, - "created_by": pipeline_id, - "date_collected": datetime.now(timezone.utc), - }, + defaults=default_data, ) advisory_obj.aliases.add(*aliases) + except Advisory.MultipleObjectsReturned: + logger.error( + f"Multiple Advisories returned: unique_content_id: {content_id}, url: {advisory.url}, advisory: {advisory!r}" + ) + raise except Exception as e: if logger: logger( @@ -137,19 +144,18 @@ def import_advisory( }, ) vulnerability.severities.add(vulnerability_severity) + if not created and logger: + logger( + f"Severity updated for reference {ref.url!r} to value: {severity.value!r} " + f"and scoring_elements: {severity.scoring_elements!r}", + level=logging.DEBUG, + ) except: if logger: logger( f"Failed to create VulnerabilitySeverity for: {severity} with error:\n{traceback_format_exc()}", level=logging.ERROR, ) - if not created: - if logger: - logger( - f"Severity updated for reference {ref.url!r} to value: {severity.value!r} " - f"and scoring_elements: {severity.scoring_elements!r}", - level=logging.DEBUG, - ) for affected_purl in affected_purls or []: vulnerable_package, _ = Package.objects.get_or_create_from_purl(purl=affected_purl) From 0828341f2730a01a2532cf3d03065744d2255dd0 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 7 Apr 2025 22:29:24 +0530 Subject: [PATCH 090/545] Test advisory with duplicate content_id Signed-off-by: Keshav Priyadarshi --- vulnerabilities/import_runner.py | 2 +- vulnerabilities/tests/conftest.py | 1 - ...populate_vulnerability_summary_pipeline.py | 4 + .../test_remove_duplicate_advisories.py | 11 +- 
vulnerabilities/tests/pipes/test_advisory.py | 156 ++++++++++++------ vulnerabilities/tests/test_add_cvsssv31.py | 1 + vulnerabilities/tests/test_import_runner.py | 1 + vulnerabilities/tests/test_models.py | 117 +++++++++---- .../tests/test_postgres_workaround.py | 1 + .../test_vulnerability_status_improver.py | 5 +- 10 files changed, 208 insertions(+), 91 deletions(-) diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index 10e1ab4b1..5bcf5f461 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -123,7 +123,7 @@ def process_advisories( obj.aliases.add(*aliases) if not obj.date_imported: advisories.append(obj) - except Advisory.MultipleObjectsReturned as mo: + except Advisory.MultipleObjectsReturned: logger.error( f"Multiple Advisories returned: unique_content_id: {content_id}, url: {data.url}, advisory: {advisory!r}" ) diff --git a/vulnerabilities/tests/conftest.py b/vulnerabilities/tests/conftest.py index de75014fb..69f956925 100644 --- a/vulnerabilities/tests/conftest.py +++ b/vulnerabilities/tests/conftest.py @@ -25,7 +25,6 @@ def no_rmtree(monkeypatch): # Step 2: Run test for importer only if it is activated (pytestmark = pytest.mark.skipif(...)) # Step 3: Migrate all the tests collect_ignore = [ - "test_models.py", "test_rust.py", "test_suse_backports.py", "test_suse.py", diff --git a/vulnerabilities/tests/pipelines/test_populate_vulnerability_summary_pipeline.py b/vulnerabilities/tests/pipelines/test_populate_vulnerability_summary_pipeline.py index d8f3ad944..08b135afc 100644 --- a/vulnerabilities/tests/pipelines/test_populate_vulnerability_summary_pipeline.py +++ b/vulnerabilities/tests/pipelines/test_populate_vulnerability_summary_pipeline.py @@ -43,6 +43,7 @@ def test_populate_missing_summaries_from_nvd(self): created_by="nvd_importer", date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), unique_content_id="Test", + url="https://test.com", ) adv.aliases.add(alias) @@ -110,6 +111,7 
@@ def test_non_nvd_advisory_ignored(self): created_by="other_importer", date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), unique_content_id="Test", + url="https://test.com", ) adv.aliases.add(alias) @@ -138,6 +140,7 @@ def test_multiple_matching_advisories(self): created_by="nvd_importer", date_collected=datetime.datetime(2024, 1, 1, tzinfo=pytz.UTC), unique_content_id="Test", + url="https://test.com", ) adv1.aliases.add(alias) @@ -147,6 +150,7 @@ def test_multiple_matching_advisories(self): created_by="nvd_importer", date_collected=datetime.datetime(2024, 1, 2, tzinfo=pytz.UTC), unique_content_id="Test-1", + url="https://test.com", ) adv2.aliases.add(alias) diff --git a/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py b/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py index d6cd5b5d7..0d7e682be 100644 --- a/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py +++ b/vulnerabilities/tests/pipelines/test_remove_duplicate_advisories.py @@ -32,6 +32,7 @@ def setUp(self): ) ], references=[Reference(url="https://example.com/vuln1")], + url="https://test.url/", ) def test_remove_duplicates_keeps_oldest(self): @@ -49,9 +50,10 @@ def test_remove_duplicates_keeps_oldest(self): ] advisories = [] - for date in dates: + for i, date in enumerate(dates): advisory = Advisory.objects.create( - unique_content_id=compute_content_id(advisory_data=self.advisory_data), + unique_content_id=f"incorrect-content-id{i}", + url=self.advisory_data.url, summary=self.advisory_data.summary, affected_packages=[pkg.to_dict() for pkg in self.advisory_data.affected_packages], references=[ref.to_dict() for ref in self.advisory_data.references], @@ -77,6 +79,7 @@ def test_different_content_preserved(self): # Create two advisories with different content advisory1 = Advisory.objects.create( unique_content_id="test-id1", + url="https://test.url/", summary="Summary 1", affected_packages=[], date_collected=datetime.datetime(2024, 1, 1, 
tzinfo=pytz.UTC), @@ -87,6 +90,7 @@ def test_different_content_preserved(self): advisory2 = Advisory.objects.create( unique_content_id="test-id2", + url="https://test.url/", summary="Summary 2", affected_packages=[], references=[], @@ -111,6 +115,7 @@ def test_recompute_content_ids(self): # Create advisory without content ID advisory = Advisory.objects.create( unique_content_id="incorrect-content-id", + url=self.advisory_data.url, summary=self.advisory_data.summary, affected_packages=[pkg.to_dict() for pkg in self.advisory_data.affected_packages], references=[ref.to_dict() for ref in self.advisory_data.references], @@ -125,4 +130,4 @@ def test_recompute_content_ids(self): # Check that content ID was updated advisory.refresh_from_db() expected_content_id = compute_content_id(advisory_data=self.advisory_data) - self.assertNotEqual(advisory.unique_content_id, expected_content_id) + self.assertEqual(advisory.unique_content_id, expected_content_id) diff --git a/vulnerabilities/tests/pipes/test_advisory.py b/vulnerabilities/tests/pipes/test_advisory.py index c59c96ef8..ee29a4b8d 100644 --- a/vulnerabilities/tests/pipes/test_advisory.py +++ b/vulnerabilities/tests/pipes/test_advisory.py @@ -7,7 +7,10 @@ # See https://aboutcode.org for more information about nexB OSS projects. 
# -import pytest +from datetime import datetime + +from django.core.exceptions import ValidationError +from django.test import TestCase from django.utils import timezone from packageurl import PackageURL from univers.version_range import VersionRange @@ -18,65 +21,116 @@ from vulnerabilities.importer import Reference from vulnerabilities.pipes.advisory import get_or_create_aliases from vulnerabilities.pipes.advisory import import_advisory +from vulnerabilities.utils import compute_content_id -advisory_data1 = AdvisoryData( - summary="vulnerability description here", - affected_packages=[ - AffectedPackage( - package=PackageURL(type="pypi", name="dummy"), - affected_version_range=VersionRange.from_string("vers:pypi/>=1.0.0|<=2.0.0"), - ) - ], - references=[Reference(url="https://example.com/with/more/info/CVE-2020-13371337")], - date_published=timezone.now(), - url="https://test.com", -) +class TestPipeAdvisory(TestCase): + def setUp(self): + self.advisory_data1 = AdvisoryData( + summary="vulnerability description here", + affected_packages=[ + AffectedPackage( + package=PackageURL(type="pypi", name="dummy"), + affected_version_range=VersionRange.from_string("vers:pypi/>=1.0.0|<=2.0.0"), + ) + ], + references=[Reference(url="https://example.com/with/more/info/CVE-2020-13371337")], + date_published=timezone.now(), + url="https://test.com", + ) -def get_advisory1(created_by="test_pipeline"): - from vulnerabilities.pipes.advisory import insert_advisory + def get_advisory1(self, created_by="test_pipeline"): + from vulnerabilities.pipes.advisory import insert_advisory - return insert_advisory( - advisory=advisory_data1, - pipeline_id=created_by, - ) + return insert_advisory( + advisory=self.advisory_data1, + pipeline_id=created_by, + ) + def get_all_vulnerability_relationships_objects(self): + return { + "vulnerabilities": list(models.Vulnerability.objects.all()), + "aliases": list(models.Alias.objects.all()), + "references": 
list(models.VulnerabilityReference.objects.all()), + "advisories": list(models.Advisory.objects.all()), + "packages": list(models.Package.objects.all()), + "references": list(models.VulnerabilityReference.objects.all()), + "severity": list(models.VulnerabilitySeverity.objects.all()), + } -def get_all_vulnerability_relationships_objects(): - return { - "vulnerabilities": list(models.Vulnerability.objects.all()), - "aliases": list(models.Alias.objects.all()), - "references": list(models.VulnerabilityReference.objects.all()), - "advisories": list(models.Advisory.objects.all()), - "packages": list(models.Package.objects.all()), - "references": list(models.VulnerabilityReference.objects.all()), - "severity": list(models.VulnerabilitySeverity.objects.all()), - } + def test_vulnerability_pipes_importer_import_advisory(self): + advisory1 = self.get_advisory1(created_by="test_importer_pipeline") + import_advisory(advisory=advisory1, pipeline_id="test_importer_pipeline") + all_vulnerability_relation_objects = self.get_all_vulnerability_relationships_objects() + import_advisory(advisory=advisory1, pipeline_id="test_importer_pipeline") + assert ( + all_vulnerability_relation_objects == self.get_all_vulnerability_relationships_objects() + ) + def test_vulnerability_pipes_importer_import_advisory_different_pipelines(self): + advisory1 = self.get_advisory1(created_by="test_importer_pipeline") + import_advisory(advisory=advisory1, pipeline_id="test_importer1_pipeline") + all_vulnerability_relation_objects = self.get_all_vulnerability_relationships_objects() + import_advisory(advisory=advisory1, pipeline_id="test_importer2_pipeline") + assert ( + all_vulnerability_relation_objects == self.get_all_vulnerability_relationships_objects() + ) -@pytest.mark.django_db -def test_vulnerability_pipes_importer_import_advisory(): - advisory1 = get_advisory1(created_by="test_importer_pipeline") - import_advisory(advisory=advisory1, pipeline_id="test_importer_pipeline") - 
all_vulnerability_relation_objects = get_all_vulnerability_relationships_objects() - import_advisory(advisory=advisory1, pipeline_id="test_importer_pipeline") - assert all_vulnerability_relation_objects == get_all_vulnerability_relationships_objects() + def test_vulnerability_pipes_get_or_create_aliases(self): + aliases = ["CVE-TEST-123", "CVE-TEST-124"] + result_aliases_qs = get_or_create_aliases(aliases=aliases) + result_aliases = [i.alias for i in result_aliases_qs] + assert 2 == result_aliases_qs.count() + assert "CVE-TEST-123" in result_aliases + assert "CVE-TEST-124" in result_aliases + def test_advisory_insert_without_url(self): + with self.assertRaises(ValidationError): + date = datetime.now() + models.Advisory.objects.create( + unique_content_id=compute_content_id(advisory_data=self.advisory_data1), + summary=self.advisory_data1.summary, + affected_packages=[pkg.to_dict() for pkg in self.advisory_data1.affected_packages], + references=[ref.to_dict() for ref in self.advisory_data1.references], + date_imported=date, + date_collected=date, + created_by="test_pipeline", + ) -@pytest.mark.django_db -def test_vulnerability_pipes_importer_import_advisory_different_pipelines(): - advisory1 = get_advisory1(created_by="test_importer_pipeline") - import_advisory(advisory=advisory1, pipeline_id="test_importer1_pipeline") - all_vulnerability_relation_objects = get_all_vulnerability_relationships_objects() - import_advisory(advisory=advisory1, pipeline_id="test_importer2_pipeline") - assert all_vulnerability_relation_objects == get_all_vulnerability_relationships_objects() + def test_advisory_insert_without_content_id(self): + with self.assertRaises(ValidationError): + date = datetime.now() + models.Advisory.objects.create( + url=self.advisory_data1.url, + summary=self.advisory_data1.summary, + affected_packages=[pkg.to_dict() for pkg in self.advisory_data1.affected_packages], + references=[ref.to_dict() for ref in self.advisory_data1.references], + date_imported=date, 
+ date_collected=date, + created_by="test_pipeline", + ) + def test_advisory_insert_no_duplicate_content_id(self): + date = datetime.now() + models.Advisory.objects.create( + unique_content_id=compute_content_id(advisory_data=self.advisory_data1), + url=self.advisory_data1.url, + summary=self.advisory_data1.summary, + affected_packages=[pkg.to_dict() for pkg in self.advisory_data1.affected_packages], + references=[ref.to_dict() for ref in self.advisory_data1.references], + date_imported=date, + date_collected=date, + created_by="test_pipeline", + ) -@pytest.mark.django_db -def test_vulnerability_pipes_get_or_create_aliases(): - aliases = ["CVE-TEST-123", "CVE-TEST-124"] - result_aliases_qs = get_or_create_aliases(aliases=aliases) - result_aliases = [i.alias for i in result_aliases_qs] - assert 2 == result_aliases_qs.count() - assert "CVE-TEST-123" in result_aliases - assert "CVE-TEST-124" in result_aliases + with self.assertRaises(ValidationError): + models.Advisory.objects.create( + unique_content_id=compute_content_id(advisory_data=self.advisory_data1), + url=self.advisory_data1.url, + summary=self.advisory_data1.summary, + affected_packages=[pkg.to_dict() for pkg in self.advisory_data1.affected_packages], + references=[ref.to_dict() for ref in self.advisory_data1.references], + date_imported=date, + date_collected=date, + created_by="test_pipeline", + ) diff --git a/vulnerabilities/tests/test_add_cvsssv31.py b/vulnerabilities/tests/test_add_cvsssv31.py index 6b1c1875a..7116ad456 100644 --- a/vulnerabilities/tests/test_add_cvsssv31.py +++ b/vulnerabilities/tests/test_add_cvsssv31.py @@ -25,6 +25,7 @@ def setUp(self): advisory = Advisory.objects.create( created_by="nvd_importer", unique_content_id="test-unique-content-id", + url="https://nvd.nist.gov/vuln/detail/CVE-2024-1234", references=[ { "severities": [ diff --git a/vulnerabilities/tests/test_import_runner.py b/vulnerabilities/tests/test_import_runner.py index 3b8080086..2a0757e14 100644 --- 
a/vulnerabilities/tests/test_import_runner.py +++ b/vulnerabilities/tests/test_import_runner.py @@ -179,6 +179,7 @@ def test_advisory_summary_clean_up(): DUMMY_ADVISORY = models.Advisory( unique_content_id="test-unique-content-id", + url="https://test.url/", summary="dummy", created_by="tests", date_collected=timezone.now(), diff --git a/vulnerabilities/tests/test_models.py b/vulnerabilities/tests/test_models.py index a5f8e251c..7b2dd06cc 100644 --- a/vulnerabilities/tests/test_models.py +++ b/vulnerabilities/tests/test_models.py @@ -8,17 +8,25 @@ # import urllib.parse +from datetime import datetime from unittest import TestCase import pytest +from django.core.exceptions import ValidationError +from django.test import TestCase as DjangoTestCase from packageurl import PackageURL from univers import versions from univers.version_range import RANGE_CLASS_BY_SCHEMES +from univers.version_range import VersionRange from vulnerabilities import models +from vulnerabilities.importer import AdvisoryData +from vulnerabilities.importer import AffectedPackage +from vulnerabilities.importer import Reference from vulnerabilities.models import Alias from vulnerabilities.models import Package from vulnerabilities.models import Vulnerability +from vulnerabilities.utils import compute_content_id class TestVulnerabilityModel(TestCase): @@ -28,11 +36,9 @@ def test_vulnerability_save_with_vulnerability_id(self): assert models.Vulnerability.objects.filter(vulnerability_id="CVE-2020-7965").count() == 1 @pytest.mark.django_db - def test_cwe_not_present_in_weaknesses_db(self): + def test_cwe_present_in_weaknesses_db(self): w1 = models.Weakness.objects.create(cwe_id=189) - assert w1.weakness is None - assert w1.name is "" - assert w1.description is "" + assert w1.name == "Numeric Errors" # FIXME: The fixture code is duplicated. setUpClass is not working with the pytest mark. 
@@ -51,7 +57,7 @@ def test_package_to_vulnerability(self): assert p1.fixing_vulnerabilities.count() == 0 assert p2.fixing_vulnerabilities.count() == 1 - assert p2.fixing_vulnerabilities[0] == v1 + assert p2.fixing_vulnerabilities.first() == v1 def test_vulnerability_package(self): p1 = models.Package.objects.create(type="deb", name="git", version="2.30.1") @@ -66,8 +72,8 @@ def test_vulnerability_package(self): assert v1.vulnerable_packages.count() == 1 assert v1.fixed_by_packages.count() == 1 - assert v1.vulnerable_packages[0] == p1 - assert v1.fixed_by_packages[0] == p2 + assert v1.vulnerable_packages.first() == p1 + assert v1.fixed_by_packages.first() == p2 @pytest.mark.django_db @@ -208,10 +214,14 @@ def test_fixed_package_details(self): assert len(searched_for_package.affected_by) == 2 assert self.vuln_VCID_g2fu_45jw_aaan in searched_for_package.affected_by - assert self.package_pypi_redis_4_3_6 in self.vuln_VCID_g2fu_45jw_aaan.fixed_by_packages + assert ( + self.package_pypi_redis_4_3_6 in self.vuln_VCID_g2fu_45jw_aaan.fixed_by_packages.all() + ) assert self.vuln_VCID_rqe1_dkmg_aaad in searched_for_package.affected_by - assert self.package_pypi_redis_5_0_0b1 in self.vuln_VCID_rqe1_dkmg_aaad.fixed_by_packages + assert ( + self.package_pypi_redis_5_0_0b1 in self.vuln_VCID_rqe1_dkmg_aaad.fixed_by_packages.all() + ) searched_for_package_details = searched_for_package.fixed_package_details @@ -221,19 +231,8 @@ def test_fixed_package_details(self): name="redis", version="4.1.1", ), - "next_non_vulnerable": PackageURL( - type="pypi", - name="redis", - version="5.0.0b1", - ), - "latest_non_vulnerable": PackageURL( - type="pypi", - namespace=None, - name="redis", - version="5.0.0b1", - qualifiers={}, - subpath=None, - ), + "next_non_vulnerable": self.package_pypi_redis_5_0_0b1, + "latest_non_vulnerable": self.package_pypi_redis_5_0_0b1, "vulnerabilities": [ { "vulnerability": self.vuln_VCID_g2fu_45jw_aaan, @@ -276,13 +275,9 @@ def test_fixed_package_details(self): 
assert searched_for_package_details == package_details - assert searched_for_package_details.get("latest_non_vulnerable") == PackageURL( - type="pypi", - namespace=None, - name="redis", - version="5.0.0b1", - qualifiers={}, - subpath=None, + assert ( + searched_for_package_details.get("latest_non_vulnerable") + == self.package_pypi_redis_5_0_0b1 ) searched_for_package_fixing = searched_for_package.fixing @@ -477,15 +472,15 @@ def test_affecting_vulnerabilities_vulnerabilityqueryset_method(self): assert all_vulnerabilities_count == 4 - def test_affecting_vulnerabilities_package_property_method(self): + def test_affected_by_package_property_method(self): """ - Return a queryset of Vulnerabilities using the Package affecting_vulnerabilities() property + Return a queryset of Vulnerabilities using the Package affected_by() property method. """ searched_for_package = self.package_pypi_redis_4_1_1 # Return a queryset of Vulnerabilities that affect a specific Package. - this_package_vulnerabilities = searched_for_package.affecting_vulnerabilities + this_package_vulnerabilities = searched_for_package.affected_by assert this_package_vulnerabilities[0] == self.vuln_VCID_g2fu_45jw_aaan assert this_package_vulnerabilities[1] == self.vuln_VCID_rqe1_dkmg_aaad @@ -505,7 +500,7 @@ def test_fixing_vulnerabilities_package_property_method(self): redis_4_3_6_fixing_vulnerabilities = searched_for_package_redis_4_3_6.fixing_vulnerabilities assert redis_4_3_6_fixing_vulnerabilities.count() == 1 - assert redis_4_3_6_fixing_vulnerabilities[0] == self.vuln_VCID_g2fu_45jw_aaan + assert redis_4_3_6_fixing_vulnerabilities.first() == self.vuln_VCID_g2fu_45jw_aaan searched_for_package_redis_5_0_0b1 = self.package_pypi_redis_5_0_0b1 redis_5_0_0b1_fixing_vulnerabilities = ( @@ -513,7 +508,7 @@ def test_fixing_vulnerabilities_package_property_method(self): ) assert redis_5_0_0b1_fixing_vulnerabilities.count() == 1 - assert redis_5_0_0b1_fixing_vulnerabilities[0] == self.vuln_VCID_rqe1_dkmg_aaad + 
assert redis_5_0_0b1_fixing_vulnerabilities.first() == self.vuln_VCID_rqe1_dkmg_aaad def test_get_affecting_vulnerabilities_package_method(self): """ @@ -598,3 +593,57 @@ def test_get_fixed_by_package_versions(self): assert all_package_versions[1] == self.package_pypi_redis_4_3_6 assert all_package_versions[2] == self.package_pypi_redis_5_0_0b1 assert all_package_versions.count() == 3 + + +class TestAdvisoryModel(DjangoTestCase): + def setUp(self): + self.advisory_data1 = AdvisoryData( + summary="vulnerability description here", + affected_packages=[ + AffectedPackage( + package=PackageURL(type="pypi", name="dummy"), + affected_version_range=VersionRange.from_string("vers:pypi/>=1.0.0|<=2.0.0"), + ) + ], + references=[Reference(url="https://example.com/with/more/info/CVE-2020-13371337")], + date_published=datetime.now(), + url="https://test.com", + ) + + def test_advisory_insert_without_content_id(self): + with self.assertRaises(ValidationError): + date = datetime.now() + models.Advisory.objects.create( + url=self.advisory_data1.url, + summary=self.advisory_data1.summary, + affected_packages=[pkg.to_dict() for pkg in self.advisory_data1.affected_packages], + references=[ref.to_dict() for ref in self.advisory_data1.references], + date_imported=date, + date_collected=date, + created_by="test_pipeline", + ) + + def test_advisory_insert_no_duplicate_content_id(self): + date = datetime.now() + models.Advisory.objects.create( + unique_content_id=compute_content_id(advisory_data=self.advisory_data1), + url=self.advisory_data1.url, + summary=self.advisory_data1.summary, + affected_packages=[pkg.to_dict() for pkg in self.advisory_data1.affected_packages], + references=[ref.to_dict() for ref in self.advisory_data1.references], + date_imported=date, + date_collected=date, + created_by="test_pipeline", + ) + + with self.assertRaises(ValidationError): + models.Advisory.objects.create( + unique_content_id=compute_content_id(advisory_data=self.advisory_data1), + 
url=self.advisory_data1.url, + summary=self.advisory_data1.summary, + affected_packages=[pkg.to_dict() for pkg in self.advisory_data1.affected_packages], + references=[ref.to_dict() for ref in self.advisory_data1.references], + date_imported=date, + date_collected=date, + created_by="test_pipeline", + ) diff --git a/vulnerabilities/tests/test_postgres_workaround.py b/vulnerabilities/tests/test_postgres_workaround.py index 9fe2c66a0..b3d2d78cd 100644 --- a/vulnerabilities/tests/test_postgres_workaround.py +++ b/vulnerabilities/tests/test_postgres_workaround.py @@ -427,6 +427,7 @@ def test_postgres_workaround_with_many_references_many_affected_packages_and_long_summary(): adv, _ = Advisory.objects.get_or_create( unique_content_id="test-unique-content-id", + url="https://test.url/", summary=data.summary, affected_packages=[pkg.to_dict() for pkg in data.affected_packages], references=[ref.to_dict() for ref in data.references], diff --git a/vulnerabilities/tests/test_vulnerability_status_improver.py b/vulnerabilities/tests/test_vulnerability_status_improver.py index 2a67730d4..6605d717c 100644 --- a/vulnerabilities/tests/test_vulnerability_status_improver.py +++ b/vulnerabilities/tests/test_vulnerability_status_improver.py @@ -35,13 +35,15 @@ def test_interesting_advisories(): adv1 = Advisory.objects.create( unique_content_id="test-unique-content-id", + url="https://test.url/", created_by=NVDImporterPipeline.pipeline_id, summary="1", date_collected=datetime.now(), ) adv1.aliases.add(*get_or_create_aliases(["CVE-1"])) adv2 = Advisory.objects.create( - unique_content_id="test-unique-content-id", + unique_content_id="test-unique-content-id2", + url="https://test.url/", created_by=NVDImporterPipeline.pipeline_id, summary="2", date_collected=datetime.now(), @@ -58,6 +60,7 @@ def test_improver_end_to_end(mock_response): mock_response.return_value = response adv = Advisory.objects.create( unique_content_id="test-unique-content-id", + url="https://test.url/", 
created_by=NVDImporterPipeline.pipeline_id, summary="1", date_collected=datetime.now(), From c3a1a1c8d76e18c4fc64026c6d9fb823c896474e Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 8 Apr 2025 15:27:17 +0530 Subject: [PATCH 091/545] Ignore failing linkcheck Signed-off-by: Keshav Priyadarshi --- docs/source/conf.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/conf.py b/docs/source/conf.py index 05cec2924..dc1e573e0 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -36,6 +36,8 @@ "https://www.softwaretestinghelp.com/how-to-write-good-bug-report/", # Cloudflare protection "https://www.openssl.org/news/vulnerabilities.xml", # OpenSSL legacy advisory URL, not longer available "https://example.org/api/non-existent-packages", + "https://github.com/aboutcode-org/vulnerablecode/pull/495/commits", + "https://nvd.nist.gov/products/cpe", ] # Add any Sphinx extension module names here, as strings. They can be From b99b0c0a9098360a775fb065f3888993aa8999f3 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 8 Apr 2025 22:01:22 +0530 Subject: [PATCH 092/545] Update ci to use ubuntu 22.04 - Ubuntu 20.04 runner is deprecated https://github.com/actions/runner-images/issues/11101 Signed-off-by: Keshav Priyadarshi --- .github/workflows/docs.yml | 2 +- .github/workflows/main.yml | 2 +- .github/workflows/pypi-release.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index be89a5973..a92238227 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -4,7 +4,7 @@ on: [push, pull_request] jobs: build: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 strategy: max-parallel: 4 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 4428993e0..7c6872e44 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -9,7 +9,7 @@ env: jobs: build: - runs-on: ubuntu-20.04 + runs-on: 
ubuntu-22.04 services: postgres: diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 600b046d4..1bf50d19e 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -21,7 +21,7 @@ on: jobs: build-pypi-distribs: name: Build and publish library to PyPI - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@master From c0f30270cb4c169f4dc450f2174f54260158cecb Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 30 Apr 2025 13:10:33 +0530 Subject: [PATCH 093/545] Use built-in iterator for fetching packages Signed-off-by: Keshav Priyadarshi --- vulnerabilities/management/commands/export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/management/commands/export.py b/vulnerabilities/management/commands/export.py index 08685e33d..0da38bbdc 100644 --- a/vulnerabilities/management/commands/export.py +++ b/vulnerabilities/management/commands/export.py @@ -159,7 +159,7 @@ def packages_by_type_ns_name(): "fixing_vulnerabilities__weaknesses", "fixing_vulnerabilities__severities", ) - .paginated() + .iterator() ) for tp_ns_name, packages in groupby(qs, key=by_purl_type_ns_name): From 064c42d58b65f05651ab007bd6be594e5c376f75 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 30 Apr 2025 13:18:27 +0530 Subject: [PATCH 094/545] Add progress indicator to export command Signed-off-by: Keshav Priyadarshi --- vulnerabilities/management/commands/export.py | 30 +++++++++++++++++-- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/vulnerabilities/management/commands/export.py b/vulnerabilities/management/commands/export.py index 0da38bbdc..93dcca8a7 100644 --- a/vulnerabilities/management/commands/export.py +++ b/vulnerabilities/management/commands/export.py @@ -9,8 +9,12 @@ import logging from itertools import groupby from pathlib import Path +from timeit import default_timer as timer +from traceback import format_exc as 
traceback_format_exc import saneyaml +from aboutcode.pipeline import LoopProgress +from aboutcode.pipeline import humanize_time from django.core.management.base import BaseCommand from django.core.management.base import CommandError from packageurl import PackageURL @@ -26,7 +30,7 @@ def serialize_severity(sev): "score": sev.value, "scoring_system": sev.scoring_system, "scoring_elements": sev.scoring_elements, - "published_at": sev.published_at, + "published_at": str(sev.published_at), "url": sev.url, } @@ -88,8 +92,22 @@ def export_data(self, base_path: Path): """ i = 0 seen_vcid = set() + export_start_time = timer() - for i, (purl_without_version, package_versions) in enumerate(packages_by_type_ns_name(), 1): + distinct_packages_count = ( + Package.objects.values("type", "namespace", "name") + .distinct("type", "namespace", "name") + .count() + ) + + progress = LoopProgress( + total_iterations=distinct_packages_count, + progress_step=1, + logger=self.stdout.write, + ) + for i, (purl_without_version, package_versions) in enumerate( + progress.iter(packages_by_type_ns_name()), 1 + ): pkg_version = None try: package_urls = [] @@ -131,9 +149,15 @@ def export_data(self, base_path: Path): self.stdout.write(f"Processed {i} package. 
Last PURL: {purl_without_version}") except Exception as e: - raise Exception(f"Failed to process Package: {pkg_version}") from e + self.stdout.write( + self.style.ERROR( + f"Failed to process Package {pkg_version}: {e!r} \n {traceback_format_exc()}" + ) + ) self.stdout.write(f"Exported data for: {i} package and {len(seen_vcid)} vulnerabilities.") + export_run_time = timer() - export_start_time + self.stdout.write(f"Export completed in {humanize_time(export_run_time)}") def by_purl_type_ns_name(package): From 2ff0b1112250fbf71543806a25c5b53afeee65b0 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 30 Apr 2025 13:28:34 +0530 Subject: [PATCH 095/545] Avoid triggering fresh db query on prefetched data - The Package.vulnerabilities property triggers fresh db query even on prefetched data. Signed-off-by: Keshav Priyadarshi --- vulnerabilities/management/commands/export.py | 7 ++++++- vulnerabilities/models.py | 4 ---- .../aboutcode-vulnerabilities/ps/VCID-pst6-b358-aaap.yml | 2 +- vulnerabilities/tests/test_models.py | 4 +--- 4 files changed, 8 insertions(+), 9 deletions(-) diff --git a/vulnerabilities/management/commands/export.py b/vulnerabilities/management/commands/export.py index 93dcca8a7..36323b04b 100644 --- a/vulnerabilities/management/commands/export.py +++ b/vulnerabilities/management/commands/export.py @@ -6,6 +6,7 @@ # See https://github.com/aboutcode-org/vulnerablecode for support or download. # See https://aboutcode.org for more information about nexB OSS projects. 
# +import itertools import logging from itertools import groupby from pathlib import Path @@ -126,7 +127,11 @@ def export_data(self, base_path: Path): } package_vulnerabilities.append(package_data) - for vuln in pkg_version.vulnerabilities: + vulnerabilities = itertools.chain( + pkg_version.affected_by_vulnerabilities.all(), + pkg_version.fixing_vulnerabilities.all(), + ) + for vuln in vulnerabilities: vcid = vuln.vulnerability_id # do not write twice the same file if vcid in seen_vcid: diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index fc317b3ce..2e54533b2 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -994,10 +994,6 @@ def next_non_vulnerable_version(self): next_non_vulnerable, _ = self.get_non_vulnerable_versions() return next_non_vulnerable.version if next_non_vulnerable else None - @property - def vulnerabilities(self): - return self.affected_by_vulnerabilities.all() | self.fixing_vulnerabilities.all() - @property def latest_non_vulnerable_version(self): """ diff --git a/vulnerabilities/tests/test_data/export_command/aboutcode-vulnerabilities/ps/VCID-pst6-b358-aaap.yml b/vulnerabilities/tests/test_data/export_command/aboutcode-vulnerabilities/ps/VCID-pst6-b358-aaap.yml index 63ab7f5af..5451794b8 100644 --- a/vulnerabilities/tests/test_data/export_command/aboutcode-vulnerabilities/ps/VCID-pst6-b358-aaap.yml +++ b/vulnerabilities/tests/test_data/export_command/aboutcode-vulnerabilities/ps/VCID-pst6-b358-aaap.yml @@ -6,7 +6,7 @@ severities: - score: '7.0' scoring_system: cvssv3_vector scoring_elements: CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H - published_at: + published_at: None url: https://.. 
weaknesses: - CWE-15 diff --git a/vulnerabilities/tests/test_models.py b/vulnerabilities/tests/test_models.py index 7b2dd06cc..ceef67959 100644 --- a/vulnerabilities/tests/test_models.py +++ b/vulnerabilities/tests/test_models.py @@ -428,9 +428,7 @@ def test_affecting_vulnerabilities_vulnerabilityqueryset_method(self): searched_for_package = self.package_pypi_redis_4_1_1 # Return a queryset of Vulnerabilities that affect this Package. - this_package_vulnerabilities = ( - searched_for_package.vulnerabilities.affecting_vulnerabilities() - ) + this_package_vulnerabilities = searched_for_package.affected_by assert this_package_vulnerabilities[0] == self.vuln_VCID_g2fu_45jw_aaan assert this_package_vulnerabilities[1] == self.vuln_VCID_rqe1_dkmg_aaad From 380cd6fd58f25bc9c4ee1ca96b3ecfa6728e0610 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:01:29 +0530 Subject: [PATCH 096/545] Add PipelineSchedule and PipelineRun models Signed-off-by: Keshav Priyadarshi --- vulnerabilities/models.py | 347 +++++++++++++++++++++++++++++++++++++- 1 file changed, 344 insertions(+), 3 deletions(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 2e54533b2..430f85cba 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -8,8 +8,7 @@ # import csv -import hashlib -import json +import datetime import logging import xml.etree.ElementTree as ET from contextlib import suppress @@ -18,6 +17,9 @@ from operator import attrgetter from typing import Union +import django_rq +import redis +from aboutcode.pipeline import humanize_time from cvss.exceptions import CVSS2MalformedError from cvss.exceptions import CVSS3MalformedError from cvss.exceptions import CVSS4MalformedError @@ -46,17 +48,22 @@ from packageurl.contrib.django.models import PackageURLMixin from packageurl.contrib.django.models import PackageURLQuerySet from rest_framework.authtoken.models import Token +from rq.command import send_stop_job_command +from rq.exceptions 
import NoSuchJobError +from rq.job import Job +from rq.job import JobStatus from univers.version_range import RANGE_CLASS_BY_SCHEMES from univers.version_range import AlpineLinuxVersionRange from univers.versions import Version +import vulnerablecode from vulnerabilities import utils from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS -from vulnerabilities.utils import compute_content_id from vulnerabilities.utils import normalize_purl from vulnerabilities.utils import purl_to_dict from vulnerablecode import __version__ as VULNERABLECODE_VERSION +from vulnerablecode.settings import VULNERABLECODE_ASYNC logger = logging.getLogger(__name__) @@ -1811,3 +1818,337 @@ class CodeFix(CodeChange): related_name="code_fix", help_text="The fixing package version with this code fix", ) + + +class PipelineRun(models.Model): + """The Database representation of a pipeline execution.""" + + pipeline = models.ForeignKey( + "PipelineSchedule", + related_name="pipelineruns", + on_delete=models.CASCADE, + ) + run_id = models.CharField( + blank=True, + null=True, + editable=False, + ) + run_start_date = models.DateTimeField( + blank=True, + null=True, + editable=False, + ) + run_end_date = models.DateTimeField( + blank=True, + null=True, + editable=False, + ) + run_exitcode = models.IntegerField( + null=True, + blank=True, + editable=False, + ) + run_output = models.TextField( + blank=True, + editable=False, + ) + created_date = models.DateTimeField( + auto_now_add=True, + db_index=True, + ) + vulnerablecode_version = models.CharField( + max_length=100, + blank=True, + null=True, + ) + vulnerablecode_commit = models.CharField( + max_length=300, + blank=True, + null=True, + ) + log = models.TextField( + blank=True, + editable=False, + ) + + class Meta: + ordering = ["-created_date"] + + class Status(models.TextChoices): + UNKNOWN = "unknown" + RUNNING = "running" + SUCCESS = "success" + FAILURE = "failure" + STOPPED = "stopped" + 
STALE = "stale" + + @property + def status(self): + """Return current execution status.""" + status = self.Status + + if self.run_succeeded: + return status.SUCCESS + + elif self.run_staled: + return status.STALE + + elif self.run_stopped: + return status.STOPPED + + elif self.run_failed: + return status.FAILURE + + elif self.run_start_date: + return status.RUNNING + + return status.UNKNOWN + + @property + def pipeline_class(self): + """Return the pipeline class.""" + return self.pipeline.pipeline_class + + @property + def job(self): + with suppress(NoSuchJobError): + return Job.fetch( + str(self.run_id), + connection=django_rq.get_connection(), + ) + + @property + def job_status(self): + job = self.job + if job: + return self.job.get_status() + + @property + def run_succeeded(self): + """Return True if the execution was successfully executed.""" + return self.run_exitcode == 0 + + @property + def run_failed(self): + """Return True if the execution failed.""" + fail_exitcode = self.run_exitcode and self.run_exitcode > 0 + return fail_exitcode or self.job_status == "failed" + + @property + def run_stopped(self): + """Return True if the execution was stopped.""" + return self.run_exitcode == 99 + + @property + def run_staled(self): + """Return True if the execution staled.""" + return self.run_exitcode == 88 + + @property + def execution_time(self): + """Return the pipeline execution time.""" + if self.run_end_date and self.run_start_date: + execution_time = (self.run_end_date - self.run_start_date).total_seconds() + return humanize_time(execution_time) + + def set_vulnerablecode_version_and_commit(self): + """Set the current VulnerableCode version and commit.""" + if self.vulnerablecode_version: + msg = f"Field vulnerablecode_version already set to {self.vulnerablecode_version}" + raise ValueError(msg) + + self.vulnerablecode_version = VULNERABLECODE_VERSION + self.vulnerablecode_commit = vulnerablecode.get_short_commit() + 
self.save(update_fields=["vulnerablecode_version", "vulnerablecode_commit"]) + + def set_run_started(self): + """Set the `run_start_date` fields before starting the run execution.""" + self.run_start_date = timezone.now() + self.save(update_fields=["run_start_date"]) + + def set_run_ended(self, exitcode, output=""): + """Set the run-related fields after the run execution.""" + self.run_exitcode = exitcode + self.run_output = output + self.run_end_date = timezone.now() + self.save(update_fields=["run_exitcode", "run_output", "run_end_date"]) + + def set_run_staled(self): + """Set the execution as `stale` using a special 88 exitcode value.""" + self.set_run_ended(exitcode=88) + + def set_run_stopped(self): + """Set the execution as `stopped` using a special 99 exitcode value.""" + self.set_run_ended(exitcode=99) + + def stop_run(self): + self.append_to_log("Stop run requested") + + if not VULNERABLECODE_ASYNC: + self.set_run_stopped() + return + + if not self.job_status: + self.set_run_staled() + return + + if self.job_status == JobStatus.FAILED: + self.set_run_ended( + exitcode=1, + output=f"Killed from outside, latest_result={self.job.latest_result()}", + ) + return + + send_stop_job_command( + connection=django_rq.get_connection(), + job_id=str(self.run_id), + ) + self.set_run_stopped() + + def delete_run(self, delete_self=True): + if VULNERABLECODE_ASYNC and self.run_id: + if job := self.job: + job.delete() + + if delete_self: + self.delete() + + def delete(self, *args, **kwargs): + """ + Before deletion of the run instance, try to stop the run execution. 
+ """ + with suppress(redis.exceptions.ConnectionError, AttributeError): + if self.status == self.Status.RUNNING: + self.stop_run() + + return super().delete(*args, **kwargs) + + def append_to_log(self, message, is_multiline=False): + """Append ``message`` to log field of run instance.""" + message = message.strip() + if not is_multiline: + message = message.replace("\n", "").replace("\r", "") + + self.log = self.log + message + "\n" + self.save(update_fields=["log"]) + + +class PipelineSchedule(models.Model): + """The Database representation of a pipeline schedule.""" + + pipeline_id = models.CharField( + max_length=600, + help_text=("Identify a registered Pipeline class."), + unique=True, + blank=False, + null=False, + ) + + is_active = models.BooleanField( + null=True, + db_index=True, + default=True, + help_text=( + "When set to True (Yes), this Pipeline is active. " + "When set to False (No), this Pipeline is inactive and not run." + ), + ) + + run_interval = models.PositiveSmallIntegerField( + validators=[ + MinValueValidator(1, message="Interval must be at least 1 day."), + MaxValueValidator(365, message="Interval must be at most 365 days."), + ], + default=1, + help_text=("Number of days to wait between run of this pipeline."), + ) + + schedule_work_id = models.CharField( + max_length=255, + unique=True, + null=True, + blank=True, + db_index=True, + help_text=("Identifier used to manage the periodic run job."), + ) + + created_date = models.DateTimeField( + auto_now_add=True, + db_index=True, + ) + + class Meta: + ordering = ["-created_date"] + + def __str__(self): + return f"{self.pipeline_id}" + + def save(self, *args, **kwargs): + if self.pk and (existing := PipelineSchedule.objects.get(pk=self.pk)): + if existing.is_active != self.is_active or existing.run_interval != self.run_interval: + self.schedule_work_id = self.create_new_job() + self.full_clean() + return super().save(*args, **kwargs) + + @property + def pipeline_class(self): + """Return the 
pipeline class.""" + from vulnerabilities.importers import IMPORTERS_REGISTRY + from vulnerabilities.improvers import IMPROVERS_REGISTRY + + if self.pipeline_id in IMPROVERS_REGISTRY: + return IMPROVERS_REGISTRY.get(self.pipeline_id) + if self.pipeline_id in IMPORTERS_REGISTRY: + return IMPORTERS_REGISTRY.get(self.pipeline_id) + + @property + def all_runs(self): + """Return all the previous run instances for this pipeline.""" + return self.pipelineruns.all().order_by("-created_date") + + @property + def latest_run(self): + return self.pipelineruns.latest("created_date") if self.pipelineruns.exists() else None + + @property + def earliest_run(self): + return self.pipelineruns.earliest("created_date") if self.pipelineruns.exists() else None + + @property + def latest_run_date(self): + return self.latest_run.created_date if self.latest_run else None + + @property + def next_run_date(self): + if not self.is_active: + return + + current_date_time = datetime.datetime.now(tz=datetime.timezone.utc) + if self.latest_run_date: + next_execution = self.latest_run_date + datetime.timedelta(days=self.run_interval) + if next_execution > current_date_time: + return next_execution + + return current_date_time + + @property + def status(self): + if not self.is_active: + return + + if self.latest_run: + return self.latest_run.status + + def create_new_job(self): + """ + Create a new scheduled job. If a previous scheduled job + exists remove the existing job from the scheduler. 
+ """ + from vulnerabilities import schedules + + if not schedules.is_redis_running(): + return + if self.schedule_work_id: + schedules.clear_job(self.schedule_work_id) + + return schedules.schedule_execution(self) if self.is_active else None From 0b7f47cc58076144e7c20476db421612c90678b7 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:05:46 +0530 Subject: [PATCH 097/545] Add pipeline execution task Signed-off-by: Keshav Priyadarshi --- vulnerabilities/tasks.py | 106 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100644 vulnerabilities/tasks.py diff --git a/vulnerabilities/tasks.py b/vulnerabilities/tasks.py new file mode 100644 index 000000000..623f00a5f --- /dev/null +++ b/vulnerabilities/tasks.py @@ -0,0 +1,106 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
+# + + +import logging +from io import StringIO +from traceback import format_exc as traceback_format_exc + +from rq import get_current_job + +from vulnerabilities import models +from vulnerabilities.importer import Importer +from vulnerabilities.improver import Improver + +logger = logging.getLogger(__name__) + + +def execute_pipeline(pipeline_id): + from vulnerabilities.pipelines import VulnerableCodePipeline + + logger.info(f"Enter `execute_pipeline` {pipeline_id}") + + pipeline_schedule = models.PipelineSchedule.objects.get(pipeline_id=pipeline_id) + job = get_current_job() + + run = models.PipelineRun.objects.create( + pipeline=pipeline_schedule, + run_id=job.id, + ) + + run.set_vulnerablecode_version_and_commit() + run.set_run_started() + + output = "" + exitcode = 0 + run_class = run.pipeline_class + if issubclass(run_class, VulnerableCodePipeline): + pipeline_instance = run_class(run_instance=run) + exitcode, output = pipeline_instance.execute() + elif issubclass(run_class, Importer) or issubclass(run_class, Improver): + exitcode, output = legacy_runner(run_class=run_class, run=run) + else: + output = f"{pipeline_id} is not a valid importer/improver." 
+ exitcode = 1 + + run.set_run_ended(exitcode=exitcode, output=output) + logger.info("Update Run instance with exitcode, output, and end_date") + + +def legacy_runner(run_class, run): + from vulnerabilities.import_runner import ImportRunner + from vulnerabilities.improve_runner import ImproveRunner + + exitcode = 0 + output = "" + pipeline_id = run.pipeline.pipeline_id + + log_stream = StringIO() + handler = logging.StreamHandler(log_stream) + module_name = pipeline_id.rsplit(".", 1)[0] + logger_modules = [module_name] + if module_name.startswith("vulnerabilities.improvers."): + logger_modules.append("vulnerabilities.improve_runner") + elif module_name.startswith("vulnerabilities.importers."): + logger_modules.append("vulnerabilities.import_runner") + + loggers = [] + for name in logger_modules: + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + logger.addHandler(handler) + loggers.append(logger) + + try: + if issubclass(run_class, Importer): + ImportRunner(run_class).run() + run.append_to_log(f"Successfully imported data using {pipeline_id}") + elif issubclass(run_class, Improver): + ImproveRunner(improver_class=run_class).run() + run.append_to_log(f"Successfully improved data using {pipeline_id}") + except Exception as e: + output = (f"Failed to run {pipeline_id}: {e!r} \n {traceback_format_exc()}",) + exitcode = 1 + + run.append_to_log(log_stream.getvalue(), is_multiline=True) + [logger.removeHandler(handler) for logger in loggers] + + return exitcode, output + + +def set_run_failure(job, connection, type, value, traceback): + from vulnerabilities.models import PipelineRun + + try: + run = PipelineRun.objects.get(run_id=job.id) + except PipelineRun.DoesNotExist: + logger.info(f"Failed to get the run instance with job.id={job.id}") + return + + run.set_run_ended(exitcode=1, output=f"value={value} trace={traceback}") From 6e48ddc4c5a22f278ddb5f8b6c7ade151478a30a Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:10:49 
+0530 Subject: [PATCH 098/545] Add scheduler for pipeline tasks Signed-off-by: Keshav Priyadarshi --- vulnerabilities/schedules.py | 94 ++++++++++++++++++++++++++++++++++++ 1 file changed, 94 insertions(+) create mode 100644 vulnerabilities/schedules.py diff --git a/vulnerabilities/schedules.py b/vulnerabilities/schedules.py new file mode 100644 index 000000000..edd56540a --- /dev/null +++ b/vulnerabilities/schedules.py @@ -0,0 +1,94 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +import logging + +import django_rq +from redis.exceptions import ConnectionError + +from vulnerabilities.tasks import execute_pipeline +from vulnerablecode.settings import VULNERABLECODE_PIPELINE_TIMEOUT + +log = logging.getLogger(__name__) +scheduler = django_rq.get_scheduler() + + +def schedule_execution(pipeline_schedule): + """ + Takes a `PackageSchedule` object as input and schedule a + recurring job using `rq_scheduler` to execute the pipeline. + """ + first_execution = pipeline_schedule.next_run_date + interval_in_seconds = pipeline_schedule.run_interval * 24 * 60 * 60 + + job = scheduler.schedule( + scheduled_time=first_execution, + func=execute_pipeline, + args=[pipeline_schedule.pipeline_id], + interval=interval_in_seconds, + result_ttl=interval_in_seconds, # Remove job results after next run + timeout=VULNERABLECODE_PIPELINE_TIMEOUT, + repeat=None, # None for repeat forever + ) + return job._id + + +def scheduled_job_exists(job_id): + """ + Check if a scheduled job with the given job ID exists. 
+ """ + return job_id and (job_id in scheduler) + + +def clear_job(job): + """ + Take a job object or job ID as input + and cancel the corresponding scheduled job. + """ + return scheduler.cancel(job) + + +def clear_zombie_pipeline_schedules(logger=log): + """ + Clear scheduled jobs not associated with any PackageSchedule object. + """ + from vulnerabilities.models import PipelineSchedule + + schedule_ids = PipelineSchedule.objects.all().values_list("schedule_work_id", flat=True) + + for job in scheduler.get_jobs(): + if job._id not in schedule_ids: + logger.info(f"Deleting scheduled job {job}") + clear_job(job) + + +def is_redis_running(logger=log): + """ + Check the status of the Redis server. + """ + try: + connection = django_rq.get_connection() + return connection.ping() + except ConnectionError as e: + error_message = f"Error checking Redis status: {e}. Redis is not reachable." + logger.error(error_message) + return False + + +def update_pipeline_schedule(): + """Create schedules for new pipelines and delete schedules for removed pipelines.""" + + from vulnerabilities.importers import IMPORTERS_REGISTRY + from vulnerabilities.improvers import IMPROVERS_REGISTRY + from vulnerabilities.models import PipelineSchedule + + pipeline_ids = [*IMPORTERS_REGISTRY.keys(), *IMPROVERS_REGISTRY.keys()] + # pipeline_ids = ["nvd_importer", "vulnerabilities.importers.curl.CurlImporter"] + PipelineSchedule.objects.exclude(pipeline_id__in=pipeline_ids).delete() + [PipelineSchedule.objects.get_or_create(pipeline_id=id) for id in pipeline_ids] From 21d9d5a918db6db534dcf658fd6b3158c4d8eaa0 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:13:19 +0530 Subject: [PATCH 099/545] Add management command to init schedule Signed-off-by: Keshav Priyadarshi --- .../management/commands/run_scheduler.py | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 vulnerabilities/management/commands/run_scheduler.py diff --git 
a/vulnerabilities/management/commands/run_scheduler.py b/vulnerabilities/management/commands/run_scheduler.py new file mode 100644 index 000000000..108264da8 --- /dev/null +++ b/vulnerabilities/management/commands/run_scheduler.py @@ -0,0 +1,35 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + + +from django_rq.management.commands import rqscheduler + +from vulnerabilities import models +from vulnerabilities.schedules import clear_zombie_pipeline_schedules +from vulnerabilities.schedules import scheduled_job_exists +from vulnerabilities.schedules import update_pipeline_schedule + + +def init_pipeline_scheduled(): + """Initialize schedule jobs for active PipelineSchedule.""" + active_pipeline_qs = models.PipelineSchedule.objects.filter(is_active=True) + for pipeline_schedule in active_pipeline_qs: + if scheduled_job_exists(pipeline_schedule.schedule_work_id): + continue + new_id = pipeline_schedule.create_new_job() + pipeline_schedule.schedule_work_id = new_id + pipeline_schedule.save(update_fields=["schedule_work_id"]) + + +class Command(rqscheduler.Command): + def handle(self, *args, **kwargs): + clear_zombie_pipeline_schedules() + update_pipeline_schedule() + init_pipeline_scheduled() + super(Command, self).handle(*args, **kwargs) From 7bc98b366359ecbaa0fe1502fe71b2392005102e Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:18:56 +0530 Subject: [PATCH 100/545] Configure RQ settings Signed-off-by: Keshav Priyadarshi --- requirements.txt | 2 ++ setup.cfg | 2 ++ vulnerablecode/settings.py | 13 +++++++++++++ 3 files changed, 17 insertions(+) diff --git a/requirements.txt b/requirements.txt index 
e9c0b16aa..98776b77d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -92,8 +92,10 @@ python-dateutil==2.8.2 python-dotenv==0.20.0 pytz==2022.1 PyYAML==6.0.1 +redis==5.0.1 requests==2.32.0 restructuredtext-lint==1.4.0 +rq==1.15.1 saneyaml==0.6.0 semantic-version==2.9.0 six==1.16.0 diff --git a/setup.cfg b/setup.cfg index 6daed299a..c76436de8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -94,6 +94,8 @@ install_requires = #pipeline aboutcode.pipeline>=0.1.0 + django-rq==2.10.1 + rq-scheduler==0.13.1 #vulntotal python-dotenv diff --git a/vulnerablecode/settings.py b/vulnerablecode/settings.py index a0e1bf1c0..451e7e51c 100644 --- a/vulnerablecode/settings.py +++ b/vulnerablecode/settings.py @@ -84,6 +84,7 @@ # required for Django collectstatic discovery "drf_spectacular_sidecar", "django_recaptcha", + "django_rq", ) RECAPTCHA_PUBLIC_KEY = env.str("RECAPTCHA_PUBLIC_KEY", "") @@ -362,3 +363,15 @@ "handlers": ["console"], "level": "ERROR", } + + +VULNERABLECODE_ASYNC = True +VULNERABLECODE_PIPELINE_TIMEOUT = "24h" +RQ_QUEUES = { + "default": { + "HOST": env.str("VULNERABLECODE_REDIS_HOST", default="localhost"), + "PORT": env.str("VULNERABLECODE_REDIS_PORT", default="6379"), + "PASSWORD": env.str("VULNERABLECODE_REDIS_PASSWORD", default=""), + "DEFAULT_TIMEOUT": env.int("VULNERABLECODE_REDIS_DEFAULT_TIMEOUT", default=360), + } +} From efd8d2c0d057b98cc826a7ab5af08e38bd2850a7 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:22:23 +0530 Subject: [PATCH 101/545] Add utility to get latest commit hash Signed-off-by: Keshav Priyadarshi --- vulnerablecode/__init__.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index 3210b8cd9..1b552cdda 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -9,10 +9,37 @@ import os import sys +from contextlib import suppress +from pathlib import Path + +import git __version__ = "36.0.0" +PROJECT_DIR = 
Path(__file__).resolve().parent +ROOT_DIR = PROJECT_DIR.parent + + +def get_git_describe_from_local_checkout(): + """ + Return the git describe tag from the local checkout. + This will only provide a result when the codebase is a git clone. + """ + with suppress(git.GitError): + return git.Repo(".").git.describe(tags=True, always=True) + + +def get_short_commit(): + """ + Return the short commit hash from a Git describe string while removing + any leading "g" character if present. + """ + if git_describe := get_git_describe_from_local_checkout(): + short_commit = git_describe.split("-")[-1] + return short_commit.lstrip("g") + + def command_line(): """ Command line entry point. From 656ef3c37d9f4c1da76de19f1464517dad93d494 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:26:49 +0530 Subject: [PATCH 102/545] Update logs in PipelineRun instance Signed-off-by: Keshav Priyadarshi --- vulnerabilities/pipelines/__init__.py | 75 ++++++++++++++++++++++----- 1 file changed, 63 insertions(+), 12 deletions(-) diff --git a/vulnerabilities/pipelines/__init__.py b/vulnerabilities/pipelines/__init__.py index d74db9f35..da200db48 100644 --- a/vulnerabilities/pipelines/__init__.py +++ b/vulnerabilities/pipelines/__init__.py @@ -8,19 +8,22 @@ # import logging +import traceback from datetime import datetime from datetime import timezone from timeit import default_timer as timer from traceback import format_exc as traceback_format_exc from typing import Iterable +from typing import List -from aboutcode.pipeline import BasePipeline from aboutcode.pipeline import LoopProgress +from aboutcode.pipeline import PipelineDefinition from aboutcode.pipeline import humanize_time from vulnerabilities.importer import AdvisoryData from vulnerabilities.improver import MAX_CONFIDENCE from vulnerabilities.models import Advisory +from vulnerabilities.models import PipelineRun from vulnerabilities.pipes.advisory import import_advisory from vulnerabilities.pipes.advisory import 
insert_advisory from vulnerabilities.utils import classproperty @@ -28,19 +31,52 @@ module_logger = logging.getLogger(__name__) -class VulnerableCodePipeline(BasePipeline): - pipeline_id = None # Unique Pipeline ID +class BasePipelineRun: + """ + Encapsulate the code related to a Pipeline run (execution): + - Execution context: groups, steps + - Execution logic + - Logging + - Results + """ - def on_failure(self): - """ - Tasks to run in the event that pipeline execution fails. + def __init__( + self, + run_instance: PipelineRun = None, + selected_groups: List = None, + selected_steps: List = None, + ): + """Load the Pipeline class.""" + self.run = run_instance + self.pipeline_class = self.__class__ + self.pipeline_name = self.__class__.__name__ - Implement cleanup or other tasks that need to be performed - on pipeline failure, such as: - - Removing cloned repositories. - - Deleting downloaded archives. - """ - pass + self.selected_groups = selected_groups + self.selected_steps = selected_steps or [] + + self.execution_log = [] + self.current_step = "" + + def append_to_log(self, message): + if self.run: + self.run.append_to_log(message) + self.execution_log.append(message) + + def set_current_step(self, message): + self.current_step = message + + @staticmethod + def output_from_exception(exception): + """Return a formatted error message including the traceback.""" + output = f"{exception}\n\n" + + if exception.__cause__ and str(exception.__cause__) != str(exception): + output += f"Cause: {exception.__cause__}\n\n" + + traceback_formatted = "".join(traceback.format_tb(exception.__traceback__)) + output += f"Traceback:\n{traceback_formatted}" + + return output def execute(self): """Execute each steps in the order defined on this pipeline class.""" @@ -90,6 +126,21 @@ def log(self, message, level=logging.INFO): module_logger.log(level, message) self.append_to_log(message) + +class VulnerableCodePipeline(PipelineDefinition, BasePipelineRun): + pipeline_id = None # 
Unique Pipeline ID + + def on_failure(self): + """ + Tasks to run in the event that pipeline execution fails. + + Implement cleanup or other tasks that need to be performed + on pipeline failure, such as: + - Removing cloned repositories. + - Deleting downloaded archives. + """ + pass + @classproperty def pipeline_id(cls): """Return unique pipeline_id set in cls.pipeline_id""" From 7a7994861e3175cfdd99bda7fa9bf5ba9cdf7e79 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:29:49 +0530 Subject: [PATCH 103/545] Add API endpoint for Pipeline schedule Signed-off-by: Keshav Priyadarshi --- vulnerabilities/api_v2.py | 102 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) diff --git a/vulnerabilities/api_v2.py b/vulnerabilities/api_v2.py index 10ffb6d98..3eb65a751 100644 --- a/vulnerabilities/api_v2.py +++ b/vulnerabilities/api_v2.py @@ -14,15 +14,19 @@ from drf_spectacular.utils import extend_schema from drf_spectacular.utils import extend_schema_view from packageurl import PackageURL +from rest_framework import mixins from rest_framework import serializers from rest_framework import status from rest_framework import viewsets from rest_framework.decorators import action +from rest_framework.permissions import IsAdminUser from rest_framework.response import Response from rest_framework.reverse import reverse from vulnerabilities.models import CodeFix from vulnerabilities.models import Package +from vulnerabilities.models import PipelineRun +from vulnerabilities.models import PipelineSchedule from vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilityReference from vulnerabilities.models import VulnerabilitySeverity @@ -606,3 +610,101 @@ def get_queryset(self): affected_package_vulnerability__vulnerability__vulnerability_id=vulnerability_id ) return queryset + + +class CreateListRetrieveUpdateViewSet( + mixins.CreateModelMixin, + mixins.ListModelMixin, + mixins.RetrieveModelMixin, + 
mixins.UpdateModelMixin, + viewsets.GenericViewSet, +): + """ + A viewset that provides `create`, `list, `retrieve`, and `update` actions. + To use it, override the class and set the `.queryset` and + `.serializer_class` attributes. + """ + + pass + + +class PipelineRunAPISerializer(serializers.HyperlinkedModelSerializer): + status = serializers.SerializerMethodField() + execution_time = serializers.SerializerMethodField() + + class Meta: + model = PipelineRun + fields = [ + "run_id", + "status", + "execution_time", + "run_start_date", + "run_end_date", + "run_exitcode", + "run_output", + "created_date", + "vulnerablecode_version", + "vulnerablecode_commit", + "log", + ] + + def get_status(self, obj): + return obj.status + + def get_execution_time(self, obj): + return obj.execution_time + + +class PipelineScheduleAPISerializer(serializers.HyperlinkedModelSerializer): + url = serializers.HyperlinkedIdentityField( + view_name="schedule-detail", lookup_field="pipeline_id" + ) + pipelineruns = PipelineRunAPISerializer(many=True, read_only=True) + next_run_date = serializers.SerializerMethodField() + + class Meta: + model = PipelineSchedule + fields = [ + "url", + "pipeline_id", + "is_active", + "run_interval", + "created_date", + "schedule_work_id", + "next_run_date", + "pipelineruns", + ] + + def get_next_run_date(self, obj): + return obj.next_run_date + + +class PipelineScheduleCreateSerializer(serializers.ModelSerializer): + class Meta: + model = PipelineSchedule + fields = ["pipeline_id", "is_active", "run_interval"] + extra_kwargs = { + field: {"initial": PipelineSchedule._meta.get_field(field).get_default()} + for field in ["is_active", "run_interval"] + } + + +class PipelineScheduleUpdateSerializer(serializers.ModelSerializer): + class Meta: + model = PipelineSchedule + fields = ["is_active", "run_interval"] + + +class PipelineScheduleV2ViewSet(CreateListRetrieveUpdateViewSet): + queryset = PipelineSchedule.objects.prefetch_related("pipelineruns").all() + 
serializer_class = PipelineScheduleAPISerializer + lookup_field = "pipeline_id" + lookup_value_regex = r"[\w.]+" + # permission_classes = [IsAdminUser] + + def get_serializer_class(self): + if self.action == "create": + return PipelineScheduleCreateSerializer + elif self.action == "update": + return PipelineScheduleUpdateSerializer + return super().get_serializer_class() From 202db9ddcef54595399f32669d53436129d394cd Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:31:59 +0530 Subject: [PATCH 104/545] Add docker service for pipeline schedule Signed-off-by: Keshav Priyadarshi --- Dockerfile | 6 ++++++ docker-compose.yml | 35 +++++++++++++++++++++++++++++++++++ docker.env | 2 ++ 3 files changed, 43 insertions(+) diff --git a/Dockerfile b/Dockerfile index 45a9b496d..0a193d7c0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -17,6 +17,12 @@ ENV PYTHONDONTWRITEBYTECODE 1 RUN mkdir -p /var/vulnerablecode/static +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + wait-for-it \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + # Keep the dependencies installation before the COPY of the app/ for proper caching COPY setup.cfg setup.py requirements.txt pyproject.toml /app/ RUN pip install . -c requirements.txt diff --git a/docker-compose.yml b/docker-compose.yml index afbe9f337..3b464a7fb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,6 +10,15 @@ services: - db_data:/var/lib/postgresql/data/ - ./etc/postgresql/postgresql.conf:/etc/postgresql/postgresql.conf + redis: + image: redis + # Enable redis data persistence using the "Append Only File" with the + # default policy of fsync every second. See https://redis.io/topics/persistence + command: redis-server --appendonly yes + volumes: + - redis_data:/data + restart: always + vulnerablecode: build: . command: /bin/sh -c " @@ -26,6 +35,31 @@ services: depends_on: - db + scheduler: + build: . 
+ command: wait-for-it web:8000 -- python ./manage.py run_scheduler + env_file: + - docker.env + volumes: + - /etc/vulnerablecode/:/etc/vulnerablecode/ + depends_on: + - redis + - db + - vulnerablecode + + rq_worker: + build: . + command: wait-for-it web:8000 -- python ./manage.py rqworker default + env_file: + - docker.env + volumes: + - /etc/vulnerablecode/:/etc/vulnerablecode/ + depends_on: + - redis + - db + - vulnerablecode + + nginx: image: nginx ports: @@ -44,4 +78,5 @@ services: volumes: db_data: static: + redis_data: diff --git a/docker.env b/docker.env index 8b427a92c..2e53e5ebf 100644 --- a/docker.env +++ b/docker.env @@ -4,3 +4,5 @@ POSTGRES_PASSWORD=vulnerablecode VULNERABLECODE_DB_HOST=db VULNERABLECODE_STATIC_ROOT=/var/vulnerablecode/static/ + +VULNERABLECODE_REDIS_HOST=redis \ No newline at end of file From dd8b252f32d6a6c141bee1f1488eb33c9f18545f Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:36:36 +0530 Subject: [PATCH 105/545] Add list view for pipeline schedules Signed-off-by: Keshav Priyadarshi --- vulnerabilities/forms.py | 13 ++ .../templates/pipeline_schedule_list.html | 143 ++++++++++++++++++ vulnerabilities/views.py | 23 ++- 3 files changed, 177 insertions(+), 2 deletions(-) create mode 100644 vulnerabilities/templates/pipeline_schedule_list.html diff --git a/vulnerabilities/forms.py b/vulnerabilities/forms.py index 50511571d..403b02f62 100644 --- a/vulnerabilities/forms.py +++ b/vulnerabilities/forms.py @@ -85,3 +85,16 @@ def clean_username(self): def save_m2m(self): pass + + +class PipelineSchedulePackageForm(forms.Form): + search = forms.CharField( + required=True, + label=False, + widget=forms.TextInput( + attrs={ + "placeholder": "Search a pipeline...", + "class": "input ", + }, + ), + ) diff --git a/vulnerabilities/templates/pipeline_schedule_list.html b/vulnerabilities/templates/pipeline_schedule_list.html new file mode 100644 index 000000000..53c5902a5 --- /dev/null +++ 
b/vulnerabilities/templates/pipeline_schedule_list.html @@ -0,0 +1,143 @@ +{% extends "base.html" %} + +{% block title %} +Pipeline Schedule +{% endblock %} + +{% block extrahead %} + +{% endblock %} + + +{% block content %} +
    +
    +
    + +
    +
    +

    Pipeline Schedule

    +
    +
    +
    +
    +
    + {{ form.search }} +
    +
    + +
    +
    +
    + + {% if is_paginated %} + + {% endif %} +
    +
    +
    +{% endblock %} \ No newline at end of file diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index a2df48634..d3b842e95 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -16,20 +16,23 @@ from django.core.mail import send_mail from django.db.models import Prefetch from django.http.response import Http404 +from django.shortcuts import get_object_or_404 from django.shortcuts import redirect from django.shortcuts import render from django.urls import reverse_lazy from django.views import View from django.views import generic from django.views.generic.detail import DetailView +from django.views.generic.edit import FormMixin from django.views.generic.list import ListView -from univers.version_range import RANGE_CLASS_BY_SCHEMES -from univers.version_range import AlpineLinuxVersionRange from vulnerabilities import models from vulnerabilities.forms import ApiUserCreationForm from vulnerabilities.forms import PackageSearchForm +from vulnerabilities.forms import PipelineSchedulePackageForm from vulnerabilities.forms import VulnerabilitySearchForm +from vulnerabilities.models import PipelineRun +from vulnerabilities.models import PipelineSchedule from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS from vulnerablecode import __version__ as VULNERABLECODE_VERSION @@ -346,3 +349,19 @@ def get_context_data(self, **kwargs): } ) return context + + +class PipelineScheduleListView(ListView, FormMixin): + model = PipelineSchedule + context_object_name = "schedule_list" + template_name = "pipeline_schedule_list.html" + paginate_by = 30 + form_class = PipelineSchedulePackageForm + + def get_queryset(self): + form = self.form_class(self.request.GET) + if form.is_valid(): + return PipelineSchedule.objects.filter( + pipeline_id__icontains=form.cleaned_data.get("search") + ) + return PipelineSchedule.objects.all() From baae4a8871a19c2398b289678340b7b2e4ed033a Mon Sep 17 00:00:00 2001 From: 
Keshav Priyadarshi Date: Mon, 5 May 2025 23:41:00 +0530 Subject: [PATCH 106/545] Add css and js for log highlighting Signed-off-by: Keshav Priyadarshi --- .../static/css/highlight-10.6.0.css | 2 + .../static/css/highlight.css-10.6.0.ABOUT | 12 + .../static/js/highlight-10.6.0.min.js | 1326 +++++++++++++++++ .../static/js/highlight.js-10.6.0.ABOUT | 12 + 4 files changed, 1352 insertions(+) create mode 100644 vulnerablecode/static/css/highlight-10.6.0.css create mode 100644 vulnerablecode/static/css/highlight.css-10.6.0.ABOUT create mode 100644 vulnerablecode/static/js/highlight-10.6.0.min.js create mode 100644 vulnerablecode/static/js/highlight.js-10.6.0.ABOUT diff --git a/vulnerablecode/static/css/highlight-10.6.0.css b/vulnerablecode/static/css/highlight-10.6.0.css new file mode 100644 index 000000000..d559f9422 --- /dev/null +++ b/vulnerablecode/static/css/highlight-10.6.0.css @@ -0,0 +1,2 @@ +/* default: atom-one-dark.css */ .hljs{display:block;overflow-x:auto;padding:.5em;color:#abb2bf;background:#282c34}.hljs-comment,.hljs-quote{color:#5c6370;font-style:italic}.hljs-doctag,.hljs-formula,.hljs-keyword{color:#c678dd}.hljs-deletion,.hljs-name,.hljs-section,.hljs-selector-tag,.hljs-subst{color:#e06c75}.hljs-literal{color:#56b6c2}.hljs-addition,.hljs-attribute,.hljs-meta-string,.hljs-regexp,.hljs-string{color:#98c379}.hljs-built_in,.hljs-class .hljs-title{color:#e6c07b}.hljs-attr,.hljs-number,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-pseudo,.hljs-template-variable,.hljs-type,.hljs-variable{color:#d19a66}.hljs-bullet,.hljs-link,.hljs-meta,.hljs-selector-id,.hljs-symbol,.hljs-title{color:#61aeee}.hljs-emphasis{font-style:italic}.hljs-strong{font-weight:700}.hljs-link{text-decoration:underline} +/* .log: an-old-hope.css */ .log .hljs-comment,.log .hljs-quote{color:#b6b18b}.log .hljs-deletion,.log .hljs-name,.log .hljs-regexp,.log .hljs-selector-class,.log .hljs-selector-id,.log .hljs-tag,.log .hljs-template-variable,.log .hljs-variable{color:#eb3c54}.log 
.hljs-built_in,.log .hljs-builtin-name,.log .hljs-link,.log .hljs-literal,.log .hljs-meta,.log .hljs-number,.log .hljs-params,.log .hljs-type{color:#e7ce56}.log .hljs-attribute{color:#ee7c2b}.log .hljs-addition,.log .hljs-bullet,.log .hljs-string,.log .hljs-symbol{color:#4fb4d7}.log .hljs-section,.log .hljs-title{color:#78bb65;font-weight:700}.log .hljs-keyword,.log .hljs-selector-tag{color:#b45ea4}.log .hljs{display:block;overflow-x:auto;background:#1c1d21;color:#c0c5ce;padding:.log .5em}.log .hljs-emphasis{font-style:italic}.log .hljs-strong{font-weight:700} \ No newline at end of file diff --git a/vulnerablecode/static/css/highlight.css-10.6.0.ABOUT b/vulnerablecode/static/css/highlight.css-10.6.0.ABOUT new file mode 100644 index 000000000..b2c626a23 --- /dev/null +++ b/vulnerablecode/static/css/highlight.css-10.6.0.ABOUT @@ -0,0 +1,12 @@ +about_resource: highlight-10.6.0.css +name: highlight.js +version: 10.6.0 +download_url: https://github.com/highlightjs/highlight.js/archive/10.6.0.zip +description: Syntax highlighting with language autodetection. 
+homepage_url: https://highlightjs.org/ +license_expression: bsd-new +package_url: pkg:npm/highlight.js@10.6.0 +licenses: + - key: bsd-new + name: BSD-3-Clause + file: bsd-new.LICENSE diff --git a/vulnerablecode/static/js/highlight-10.6.0.min.js b/vulnerablecode/static/js/highlight-10.6.0.min.js new file mode 100644 index 000000000..4370be467 --- /dev/null +++ b/vulnerablecode/static/js/highlight-10.6.0.min.js @@ -0,0 +1,1326 @@ +/* + Highlight.js 10.6.0 (eb122d3b) + License: BSD-3-Clause + Copyright (c) 2006-2020, Ivan Sagalaev +*/ +var hljs=function(){"use strict";function e(t){ +return t instanceof Map?t.clear=t.delete=t.set=()=>{ +throw Error("map is read-only")}:t instanceof Set&&(t.add=t.clear=t.delete=()=>{ +throw Error("set is read-only") +}),Object.freeze(t),Object.getOwnPropertyNames(t).forEach((n=>{var s=t[n] +;"object"!=typeof s||Object.isFrozen(s)||e(s)})),t}var t=e,n=e;t.default=n +;class s{constructor(e){void 0===e.data&&(e.data={}),this.data=e.data} +ignoreMatch(){this.ignore=!0}}function r(e){ +return e.replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/'/g,"'") +}function a(e,...t){const n=Object.create(null);for(const t in e)n[t]=e[t] +;return t.forEach((e=>{for(const t in e)n[t]=e[t]})),n}const i=e=>!!e.kind +;class o{constructor(e,t){ +this.buffer="",this.classPrefix=t.classPrefix,e.walk(this)}addText(e){ +this.buffer+=r(e)}openNode(e){if(!i(e))return;let t=e.kind +;e.sublanguage||(t=`${this.classPrefix}${t}`),this.span(t)}closeNode(e){ +i(e)&&(this.buffer+="")}value(){return this.buffer}span(e){ +this.buffer+=``}}class l{constructor(){this.rootNode={ +children:[]},this.stack=[this.rootNode]}get top(){ +return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(e){ +this.top.children.push(e)}openNode(e){const t={kind:e,children:[]} +;this.add(t),this.stack.push(t)}closeNode(){ +if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){ +for(;this.closeNode(););}toJSON(){return 
JSON.stringify(this.rootNode,null,4)} +walk(e){return this.constructor._walk(e,this.rootNode)}static _walk(e,t){ +return"string"==typeof t?e.addText(t):t.children&&(e.openNode(t), +t.children.forEach((t=>this._walk(e,t))),e.closeNode(t)),e}static _collapse(e){ +"string"!=typeof e&&e.children&&(e.children.every((e=>"string"==typeof e))?e.children=[e.children.join("")]:e.children.forEach((e=>{ +l._collapse(e)})))}}class c extends l{constructor(e){super(),this.options=e} +addKeyword(e,t){""!==e&&(this.openNode(t),this.addText(e),this.closeNode())} +addText(e){""!==e&&this.add(e)}addSublanguage(e,t){const n=e.root +;n.kind=t,n.sublanguage=!0,this.add(n)}toHTML(){ +return new o(this,this.options).value()}finalize(){return!0}}function u(e){ +return e?"string"==typeof e?e:e.source:null} +const g="[a-zA-Z]\\w*",d="[a-zA-Z_]\\w*",h="\\b\\d+(\\.\\d+)?",f="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",p="\\b(0b[01]+)",m={ +begin:"\\\\[\\s\\S]",relevance:0},b={className:"string",begin:"'",end:"'", +illegal:"\\n",contains:[m]},x={className:"string",begin:'"',end:'"', +illegal:"\\n",contains:[m]},E={ +begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/ +},v=(e,t,n={})=>{const s=a({className:"comment",begin:e,end:t,contains:[]},n) +;return s.contains.push(E),s.contains.push({className:"doctag", +begin:"(?:TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):",relevance:0}),s +},w=v("//","$"),N=v("/\\*","\\*/"),y=v("#","$");var R=Object.freeze({ +__proto__:null,MATCH_NOTHING_RE:/\b\B/,IDENT_RE:g,UNDERSCORE_IDENT_RE:d, +NUMBER_RE:h,C_NUMBER_RE:f,BINARY_NUMBER_RE:p, +RE_STARTERS_RE:"!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~", +SHEBANG:(e={})=>{const t=/^#![ ]*\// +;return e.binary&&(e.begin=((...e)=>e.map((e=>u(e))).join(""))(t,/.*\b/,e.binary,/\b.*/)), 
+a({className:"meta",begin:t,end:/$/,relevance:0,"on:begin":(e,t)=>{ +0!==e.index&&t.ignoreMatch()}},e)},BACKSLASH_ESCAPE:m,APOS_STRING_MODE:b, +QUOTE_STRING_MODE:x,PHRASAL_WORDS_MODE:E,COMMENT:v,C_LINE_COMMENT_MODE:w, +C_BLOCK_COMMENT_MODE:N,HASH_COMMENT_MODE:y,NUMBER_MODE:{className:"number", +begin:h,relevance:0},C_NUMBER_MODE:{className:"number",begin:f,relevance:0}, +BINARY_NUMBER_MODE:{className:"number",begin:p,relevance:0},CSS_NUMBER_MODE:{ +className:"number", +begin:h+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?", +relevance:0},REGEXP_MODE:{begin:/(?=\/[^/\n]*\/)/,contains:[{className:"regexp", +begin:/\//,end:/\/[gimuy]*/,illegal:/\n/,contains:[m,{begin:/\[/,end:/\]/, +relevance:0,contains:[m]}]}]},TITLE_MODE:{className:"title",begin:g,relevance:0 +},UNDERSCORE_TITLE_MODE:{className:"title",begin:d,relevance:0},METHOD_GUARD:{ +begin:"\\.\\s*[a-zA-Z_]\\w*",relevance:0},END_SAME_AS_BEGIN:e=>Object.assign(e,{ +"on:begin":(e,t)=>{t.data._beginMatch=e[1]},"on:end":(e,t)=>{ +t.data._beginMatch!==e[1]&&t.ignoreMatch()}})});function _(e,t){ +"."===e.input[e.index-1]&&t.ignoreMatch()}function k(e,t){ +t&&e.beginKeywords&&(e.begin="\\b("+e.beginKeywords.split(" ").join("|")+")(?!\\.)(?=\\b|\\s)", +e.__beforeBegin=_,e.keywords=e.keywords||e.beginKeywords,delete e.beginKeywords, +void 0===e.relevance&&(e.relevance=0))}function O(e,t){ +Array.isArray(e.illegal)&&(e.illegal=((...e)=>"("+e.map((e=>u(e))).join("|")+")")(...e.illegal)) +}function M(e,t){if(e.match){ +if(e.begin||e.end)throw Error("begin & end are not supported with match") +;e.begin=e.match,delete e.match}}function A(e,t){ +void 0===e.relevance&&(e.relevance=1)} +const L=["of","and","for","in","not","or","if","then","parent","list","value"] +;function B(e,t,n="keyword"){const s={} +;return"string"==typeof e?r(n,e.split(" ")):Array.isArray(e)?r(n,e):Object.keys(e).forEach((n=>{ +Object.assign(s,B(e[n],t,n))})),s;function r(e,n){ 
+t&&(n=n.map((e=>e.toLowerCase()))),n.forEach((t=>{const n=t.split("|") +;s[n[0]]=[e,I(n[0],n[1])]}))}}function I(e,t){ +return t?Number(t):(e=>L.includes(e.toLowerCase()))(e)?0:1} +function T(e,{plugins:t}){function n(t,n){ +return RegExp(u(t),"m"+(e.case_insensitive?"i":"")+(n?"g":""))}class s{ +constructor(){ +this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0} +addRule(e,t){ +t.position=this.position++,this.matchIndexes[this.matchAt]=t,this.regexes.push([t,e]), +this.matchAt+=(e=>RegExp(e.toString()+"|").exec("").length-1)(e)+1}compile(){ +0===this.regexes.length&&(this.exec=()=>null) +;const e=this.regexes.map((e=>e[1]));this.matcherRe=n(((e,t="|")=>{ +const n=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./;let s=0,r="" +;for(let a=0;a0&&(r+=t),r+="(";o.length>0;){const e=n.exec(o);if(null==e){r+=o;break} +r+=o.substring(0,e.index), +o=o.substring(e.index+e[0].length),"\\"===e[0][0]&&e[1]?r+="\\"+(Number(e[1])+i):(r+=e[0], +"("===e[0]&&s++)}r+=")"}return r})(e),!0),this.lastIndex=0}exec(e){ +this.matcherRe.lastIndex=this.lastIndex;const t=this.matcherRe.exec(e) +;if(!t)return null +;const n=t.findIndex(((e,t)=>t>0&&void 0!==e)),s=this.matchIndexes[n] +;return t.splice(0,n),Object.assign(t,s)}}class r{constructor(){ +this.rules=[],this.multiRegexes=[], +this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(e){ +if(this.multiRegexes[e])return this.multiRegexes[e];const t=new s +;return this.rules.slice(e).forEach((([e,n])=>t.addRule(e,n))), +t.compile(),this.multiRegexes[e]=t,t}resumingScanAtSamePosition(){ +return 0!==this.regexIndex}considerAll(){this.regexIndex=0}addRule(e,t){ +this.rules.push([e,t]),"begin"===t.type&&this.count++}exec(e){ +const t=this.getMatcher(this.regexIndex);t.lastIndex=this.lastIndex +;let n=t.exec(e) +;if(this.resumingScanAtSamePosition())if(n&&n.index===this.lastIndex);else{ +const t=this.getMatcher(0);t.lastIndex=this.lastIndex+1,n=t.exec(e)} +return n&&(this.regexIndex+=n.position+1, 
+this.regexIndex===this.count&&this.considerAll()),n}} +if(e.compilerExtensions||(e.compilerExtensions=[]), +e.contains&&e.contains.includes("self"))throw Error("ERR: contains `self` is not supported at the top-level of a language. See documentation.") +;return e.classNameAliases=a(e.classNameAliases||{}),function t(s,i){const o=s +;if(s.compiled)return o +;[M].forEach((e=>e(s,i))),e.compilerExtensions.forEach((e=>e(s,i))), +s.__beforeBegin=null,[k,O,A].forEach((e=>e(s,i))),s.compiled=!0;let l=null +;if("object"==typeof s.keywords&&(l=s.keywords.$pattern, +delete s.keywords.$pattern), +s.keywords&&(s.keywords=B(s.keywords,e.case_insensitive)), +s.lexemes&&l)throw Error("ERR: Prefer `keywords.$pattern` to `mode.lexemes`, BOTH are not allowed. (see mode reference) ") +;return l=l||s.lexemes||/\w+/, +o.keywordPatternRe=n(l,!0),i&&(s.begin||(s.begin=/\B|\b/), +o.beginRe=n(s.begin),s.endSameAsBegin&&(s.end=s.begin), +s.end||s.endsWithParent||(s.end=/\B|\b/), +s.end&&(o.endRe=n(s.end)),o.terminatorEnd=u(s.end)||"", +s.endsWithParent&&i.terminatorEnd&&(o.terminatorEnd+=(s.end?"|":"")+i.terminatorEnd)), +s.illegal&&(o.illegalRe=n(s.illegal)), +s.contains||(s.contains=[]),s.contains=[].concat(...s.contains.map((e=>(e=>(e.variants&&!e.cachedVariants&&(e.cachedVariants=e.variants.map((t=>a(e,{ +variants:null},t)))),e.cachedVariants?e.cachedVariants:j(e)?a(e,{ +starts:e.starts?a(e.starts):null +}):Object.isFrozen(e)?a(e):e))("self"===e?s:e)))),s.contains.forEach((e=>{t(e,o) +})),s.starts&&t(s.starts,i),o.matcher=(e=>{const t=new r +;return e.contains.forEach((e=>t.addRule(e.begin,{rule:e,type:"begin" +}))),e.terminatorEnd&&t.addRule(e.terminatorEnd,{type:"end" +}),e.illegal&&t.addRule(e.illegal,{type:"illegal"}),t})(o),o}(e)}function j(e){ +return!!e&&(e.endsWithParent||j(e.starts))}function S(e){const t={ +props:["language","code","autodetect"],data:()=>({detectedLanguage:"", +unknownLanguage:!1}),computed:{className(){ +return this.unknownLanguage?"":"hljs 
"+this.detectedLanguage},highlighted(){ +if(!this.autoDetect&&!e.getLanguage(this.language))return console.warn(`The language "${this.language}" you specified could not be found.`), +this.unknownLanguage=!0,r(this.code);let t={} +;return this.autoDetect?(t=e.highlightAuto(this.code), +this.detectedLanguage=t.language):(t=e.highlight(this.language,this.code,this.ignoreIllegals), +this.detectedLanguage=this.language),t.value},autoDetect(){ +return!(this.language&&(e=this.autodetect,!e&&""!==e));var e}, +ignoreIllegals:()=>!0},render(e){return e("pre",{},[e("code",{ +class:this.className,domProps:{innerHTML:this.highlighted}})])}};return{ +Component:t,VuePlugin:{install(e){e.component("highlightjs",t)}}}}const P={ +"after:highlightBlock":({block:e,result:t,text:n})=>{const s=C(e) +;if(!s.length)return;const a=document.createElement("div") +;a.innerHTML=t.value,t.value=((e,t,n)=>{let s=0,a="";const i=[];function o(){ +return e.length&&t.length?e[0].offset!==t[0].offset?e[0].offset"}function c(e){ +a+=""}function u(e){("start"===e.event?l:c)(e.node)} +for(;e.length||t.length;){let t=o() +;if(a+=r(n.substring(s,t[0].offset)),s=t[0].offset,t===e){i.reverse().forEach(c) +;do{u(t.splice(0,1)[0]),t=o()}while(t===e&&t.length&&t[0].offset===s) +;i.reverse().forEach(l) +}else"start"===t[0].event?i.push(t[0].node):i.pop(),u(t.splice(0,1)[0])} +return a+r(n.substr(s))})(s,C(a),n)}};function D(e){ +return e.nodeName.toLowerCase()}function C(e){const t=[];return function e(n,s){ +for(let r=n.firstChild;r;r=r.nextSibling)3===r.nodeType?s+=r.nodeValue.length:1===r.nodeType&&(t.push({ +event:"start",offset:s,node:r}),s=e(r,s),D(r).match(/br|hr|img|input/)||t.push({ +event:"stop",offset:s,node:r}));return s}(e,0),t}const H=e=>{console.error(e) +},U=(e,...t)=>{console.log("WARN: "+e,...t)},$=(e,t)=>{ +console.log(`Deprecated as of ${e}. 
${t}`)},z=r,K=a,G=Symbol("nomatch") +;return(e=>{const n=Object.create(null),r=Object.create(null),a=[];let i=!0 +;const o=/(^(<[^>]+>|\t|)+|\n)/gm,l="Could not find the language '{}', did you forget to load/include a language module?",u={ +disableAutodetect:!0,name:"Plain text",contains:[]};let g={ +noHighlightRe:/^(no-?highlight)$/i, +languageDetectRe:/\blang(?:uage)?-([\w-]+)\b/i,classPrefix:"hljs-", +tabReplace:null,useBR:!1,languages:null,__emitter:c};function d(e){ +return g.noHighlightRe.test(e)}function h(e,t,n,s){const r={code:t,language:e} +;M("before:highlight",r);const a=r.result?r.result:f(r.language,r.code,n,s) +;return a.code=r.code,M("after:highlight",a),a}function f(e,t,r,o){const c=t +;function u(e,t){const n=w.case_insensitive?t[0].toLowerCase():t[0] +;return Object.prototype.hasOwnProperty.call(e.keywords,n)&&e.keywords[n]} +function d(){null!=R.subLanguage?(()=>{if(""===M)return;let e=null +;if("string"==typeof R.subLanguage){ +if(!n[R.subLanguage])return void O.addText(M) +;e=f(R.subLanguage,M,!0,k[R.subLanguage]),k[R.subLanguage]=e.top +}else e=p(M,R.subLanguage.length?R.subLanguage:null) +;R.relevance>0&&(A+=e.relevance),O.addSublanguage(e.emitter,e.language) +})():(()=>{if(!R.keywords)return void O.addText(M);let e=0 +;R.keywordPatternRe.lastIndex=0;let t=R.keywordPatternRe.exec(M),n="";for(;t;){ +n+=M.substring(e,t.index);const s=u(R,t);if(s){const[e,r]=s +;O.addText(n),n="",A+=r;const a=w.classNameAliases[e]||e;O.addKeyword(t[0],a) +}else n+=t[0];e=R.keywordPatternRe.lastIndex,t=R.keywordPatternRe.exec(M)} +n+=M.substr(e),O.addText(n)})(),M=""}function h(e){ +return e.className&&O.openNode(w.classNameAliases[e.className]||e.className), +R=Object.create(e,{parent:{value:R}}),R}function m(e,t,n){let r=((e,t)=>{ +const n=e&&e.exec(t);return n&&0===n.index})(e.endRe,n);if(r){if(e["on:end"]){ +const n=new s(e);e["on:end"](t,n),n.ignore&&(r=!1)}if(r){ +for(;e.endsParent&&e.parent;)e=e.parent;return e}} +if(e.endsWithParent)return 
m(e.parent,t,n)}function b(e){ +return 0===R.matcher.regexIndex?(M+=e[0],1):(I=!0,0)}function x(e){ +const t=e[0],n=c.substr(e.index),s=m(R,e,n);if(!s)return G;const r=R +;r.skip?M+=t:(r.returnEnd||r.excludeEnd||(M+=t),d(),r.excludeEnd&&(M=t));do{ +R.className&&O.closeNode(),R.skip||R.subLanguage||(A+=R.relevance),R=R.parent +}while(R!==s.parent) +;return s.starts&&(s.endSameAsBegin&&(s.starts.endRe=s.endRe), +h(s.starts)),r.returnEnd?0:t.length}let E={};function v(t,n){const a=n&&n[0] +;if(M+=t,null==a)return d(),0 +;if("begin"===E.type&&"end"===n.type&&E.index===n.index&&""===a){ +if(M+=c.slice(n.index,n.index+1),!i){const t=Error("0 width match regex") +;throw t.languageName=e,t.badRule=E.rule,t}return 1} +if(E=n,"begin"===n.type)return function(e){ +const t=e[0],n=e.rule,r=new s(n),a=[n.__beforeBegin,n["on:begin"]] +;for(const n of a)if(n&&(n(e,r),r.ignore))return b(t) +;return n&&n.endSameAsBegin&&(n.endRe=RegExp(t.replace(/[-/\\^$*+?.()|[\]{}]/g,"\\$&"),"m")), +n.skip?M+=t:(n.excludeBegin&&(M+=t), +d(),n.returnBegin||n.excludeBegin||(M=t)),h(n),n.returnBegin?0:t.length}(n) +;if("illegal"===n.type&&!r){ +const e=Error('Illegal lexeme "'+a+'" for mode "'+(R.className||"")+'"') +;throw e.mode=R,e}if("end"===n.type){const e=x(n);if(e!==G)return e} +if("illegal"===n.type&&""===a)return 1 +;if(B>1e5&&B>3*n.index)throw Error("potential infinite loop, way more iterations than matches") +;return M+=a,a.length}const w=_(e) +;if(!w)throw H(l.replace("{}",e)),Error('Unknown language: "'+e+'"') +;const N=T(w,{plugins:a});let y="",R=o||N;const k={},O=new g.__emitter(g);(()=>{ +const e=[];for(let t=R;t!==w;t=t.parent)t.className&&e.unshift(t.className) +;e.forEach((e=>O.openNode(e)))})();let M="",A=0,L=0,B=0,I=!1;try{ +for(R.matcher.considerAll();;){ +B++,I?I=!1:R.matcher.considerAll(),R.matcher.lastIndex=L +;const e=R.matcher.exec(c);if(!e)break;const t=v(c.substring(L,e.index),e) +;L=e.index+t}return v(c.substr(L)),O.closeAllNodes(),O.finalize(),y=O.toHTML(),{ 
+relevance:Math.floor(A),value:y,language:e,illegal:!1,emitter:O,top:R}}catch(t){ +if(t.message&&t.message.includes("Illegal"))return{illegal:!0,illegalBy:{ +msg:t.message,context:c.slice(L-100,L+100),mode:t.mode},sofar:y,relevance:0, +value:z(c),emitter:O};if(i)return{illegal:!1,relevance:0,value:z(c),emitter:O, +language:e,top:R,errorRaised:t};throw t}}function p(e,t){ +t=t||g.languages||Object.keys(n);const s=(e=>{const t={relevance:0, +emitter:new g.__emitter(g),value:z(e),illegal:!1,top:u} +;return t.emitter.addText(e),t})(e),r=t.filter(_).filter(O).map((t=>f(t,e,!1))) +;r.unshift(s);const a=r.sort(((e,t)=>{ +if(e.relevance!==t.relevance)return t.relevance-e.relevance +;if(e.language&&t.language){if(_(e.language).supersetOf===t.language)return 1 +;if(_(t.language).supersetOf===e.language)return-1}return 0})),[i,o]=a,l=i +;return l.second_best=o,l}const m={"before:highlightBlock":({block:e})=>{ +g.useBR&&(e.innerHTML=e.innerHTML.replace(/\n/g,"").replace(//g,"\n")) +},"after:highlightBlock":({result:e})=>{ +g.useBR&&(e.value=e.value.replace(/\n/g,"
    "))}},b=/^(<[^>]+>|\t)+/gm,x={ +"after:highlightBlock":({result:e})=>{ +g.tabReplace&&(e.value=e.value.replace(b,(e=>e.replace(/\t/g,g.tabReplace))))}} +;function E(e){let t=null;const n=(e=>{let t=e.className+" " +;t+=e.parentNode?e.parentNode.className:"";const n=g.languageDetectRe.exec(t) +;if(n){const t=_(n[1]) +;return t||(U(l.replace("{}",n[1])),U("Falling back to no-highlight mode for this block.",e)), +t?n[1]:"no-highlight"}return t.split(/\s+/).find((e=>d(e)||_(e)))})(e) +;if(d(n))return;M("before:highlightBlock",{block:e,language:n}),t=e +;const s=t.textContent,a=n?h(n,s,!0):p(s);M("after:highlightBlock",{block:e, +result:a,text:s}),e.innerHTML=a.value,((e,t,n)=>{const s=t?r[t]:n +;e.classList.add("hljs"),s&&e.classList.add(s)})(e,n,a.language),e.result={ +language:a.language,re:a.relevance,relavance:a.relevance +},a.second_best&&(e.second_best={language:a.second_best.language, +re:a.second_best.relevance,relavance:a.second_best.relevance})}const v=()=>{ +v.called||(v.called=!0, +$("10.6.0","initHighlighting() is deprecated. Use highlightAll() instead."), +document.querySelectorAll("pre code").forEach(E))};let w=!1,N=!1;function y(){ +N?document.querySelectorAll("pre code").forEach(E):w=!0}function _(e){ +return e=(e||"").toLowerCase(),n[e]||n[r[e]]}function k(e,{languageName:t}){ +"string"==typeof e&&(e=[e]),e.forEach((e=>{r[e]=t}))}function O(e){const t=_(e) +;return t&&!t.disableAutodetect}function M(e,t){const n=e;a.forEach((e=>{ +e[n]&&e[n](t)}))} +"undefined"!=typeof window&&window.addEventListener&&window.addEventListener("DOMContentLoaded",(()=>{ +N=!0,w&&y()}),!1),Object.assign(e,{highlight:h,highlightAuto:p,highlightAll:y, +fixMarkup:e=>{ +return $("10.2.0","fixMarkup will be removed entirely in v11.0"),$("10.2.0","Please see https://github.com/highlightjs/highlight.js/issues/2534"), +t=e, +g.tabReplace||g.useBR?t.replace(o,(e=>"\n"===e?g.useBR?"
    ":e:g.tabReplace?e.replace(/\t/g,g.tabReplace):e)):t +;var t},highlightBlock:E,configure:e=>{ +e.useBR&&($("10.3.0","'useBR' will be removed entirely in v11.0"), +$("10.3.0","Please see https://github.com/highlightjs/highlight.js/issues/2559")), +g=K(g,e)},initHighlighting:v,initHighlightingOnLoad:()=>{ +$("10.6.0","initHighlightingOnLoad() is deprecated. Use highlightAll() instead."), +w=!0},registerLanguage:(t,s)=>{let r=null;try{r=s(e)}catch(e){ +if(H("Language definition for '{}' could not be registered.".replace("{}",t)), +!i)throw e;H(e),r=u} +r.name||(r.name=t),n[t]=r,r.rawDefinition=s.bind(null,e),r.aliases&&k(r.aliases,{ +languageName:t})},listLanguages:()=>Object.keys(n),getLanguage:_, +registerAliases:k,requireLanguage:e=>{ +$("10.4.0","requireLanguage will be removed entirely in v11."), +$("10.4.0","Please see https://github.com/highlightjs/highlight.js/pull/2844") +;const t=_(e);if(t)return t +;throw Error("The '{}' language is required, but not loaded.".replace("{}",e))}, +autoDetection:O,inherit:K,addPlugin:e=>{a.push(e)},vuePlugin:S(e).VuePlugin +}),e.debugMode=()=>{i=!1},e.safeMode=()=>{i=!0},e.versionString="10.6.0" +;for(const e in R)"object"==typeof R[e]&&t(R[e]) +;return Object.assign(e,R),e.addPlugin(m),e.addPlugin(P),e.addPlugin(x),e})({}) +}();"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=hljs); +hljs.registerLanguage("apache",(()=>{"use strict";return e=>{const n={ +className:"number",begin:/\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d{1,5})?/} +;return{name:"Apache config",aliases:["apacheconf"],case_insensitive:!0, +contains:[e.HASH_COMMENT_MODE,{className:"section",begin:/<\/?/,end:/>/, +contains:[n,{className:"number",begin:/:\d{1,5}/ +},e.inherit(e.QUOTE_STRING_MODE,{relevance:0})]},{className:"attribute", +begin:/\w+/,relevance:0,keywords:{ +nomarkup:"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen 
serverroot servername" +},starts:{end:/$/,relevance:0,keywords:{literal:"on off all deny allow"}, +contains:[{className:"meta",begin:/\s\[/,end:/\]$/},{className:"variable", +begin:/[\$%]\{/,end:/\}/,contains:["self",{className:"number",begin:/[$%]\d+/}] +},n,{className:"number",begin:/\d+/},e.QUOTE_STRING_MODE]}}],illegal:/\S/}} +})()); +hljs.registerLanguage("bash",(()=>{"use strict";function e(...e){ +return e.map((e=>{return(s=e)?"string"==typeof s?s:s.source:null;var s +})).join("")}return s=>{const n={},t={begin:/\$\{/,end:/\}/,contains:["self",{ +begin:/:-/,contains:[n]}]};Object.assign(n,{className:"variable",variants:[{ +begin:e(/\$[\w\d#@][\w\d_]*/,"(?![\\w\\d])(?![$])")},t]});const a={ +className:"subst",begin:/\$\(/,end:/\)/,contains:[s.BACKSLASH_ESCAPE]},i={ +begin:/<<-?\s*(?=\w+)/,starts:{contains:[s.END_SAME_AS_BEGIN({begin:/(\w+)/, +end:/(\w+)/,className:"string"})]}},c={className:"string",begin:/"/,end:/"/, +contains:[s.BACKSLASH_ESCAPE,n,a]};a.contains.push(c);const o={begin:/\$\(\(/, +end:/\)\)/,contains:[{begin:/\d+#[0-9a-f]+/,className:"number"},s.NUMBER_MODE,n] +},r=s.SHEBANG({binary:"(fish|bash|zsh|sh|csh|ksh|tcsh|dash|scsh)",relevance:10 +}),l={className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0, +contains:[s.inherit(s.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0};return{ +name:"Bash",aliases:["sh","zsh"],keywords:{$pattern:/\b[a-z._-]+\b/, +keyword:"if then else elif fi for while in do done case esac function", +literal:"true false", +built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history 
integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp" +},contains:[r,s.SHEBANG(),l,o,s.HASH_COMMENT_MODE,i,c,{className:"",begin:/\\"/ +},{className:"string",begin:/'/,end:/'/},n]}}})()); +hljs.registerLanguage("c",(()=>{"use strict";function e(e){ +return((...e)=>e.map((e=>(e=>e?"string"==typeof e?e:e.source:null)(e))).join(""))("(",e,")?") +}return t=>{const n=t.COMMENT("//","$",{contains:[{begin:/\\\n/}] +}),r="[a-zA-Z_]\\w*::",a="(decltype\\(auto\\)|"+e(r)+"[a-zA-Z_]\\w*"+e("<[^<>]+>")+")",i={ +className:"keyword",begin:"\\b[a-z\\d_]*_t\\b"},s={className:"string", +variants:[{begin:'(u8?|U|L)?"',end:'"',illegal:"\\n", +contains:[t.BACKSLASH_ESCAPE]},{ +begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)", +end:"'",illegal:"."},t.END_SAME_AS_BEGIN({ +begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\(/,end:/\)([^()\\ ]{0,16})"/})]},o={ +className:"number",variants:[{begin:"\\b(0b[01']+)"},{ +begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)((ll|LL|l|L)(u|U)?|(u|U)(ll|LL|l|L)?|f|F|b|B)" +},{ +begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)" +}],relevance:0},c={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{ +"meta-keyword":"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include" +},contains:[{begin:/\\\n/,relevance:0},t.inherit(s,{className:"meta-string"}),{ +className:"meta-string",begin:/<.*?>/,end:/$/,illegal:"\\n" +},n,t.C_BLOCK_COMMENT_MODE]},l={className:"title",begin:e(r)+t.IDENT_RE, +relevance:0},d=e(r)+t.IDENT_RE+"\\s*\\(",u={ +keyword:"int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected 
bool template mutable if public friend do goto auto void enum else break extern using asm case typeid wchar_t short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignas alignof constexpr consteval constinit decltype concept co_await co_return co_yield requires noexcept static_assert thread_local restrict final override atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq", +built_in:"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary", +literal:"true false nullptr NULL"},m=[c,i,n,t.C_BLOCK_COMMENT_MODE,o,s],p={ +variants:[{begin:/=/,end:/;/},{begin:/\(/,end:/\)/},{ +beginKeywords:"new throw return else",end:/;/}],keywords:u,contains:m.concat([{ +begin:/\(/,end:/\)/,keywords:u,contains:m.concat(["self"]),relevance:0}]), +relevance:0},_={className:"function",begin:"("+a+"[\\*&\\s]+)+"+d, +returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:u,illegal:/[^\w\s\*&:<>.]/, +contains:[{begin:"decltype\\(auto\\)",keywords:u,relevance:0},{begin:d, 
+returnBegin:!0,contains:[l],relevance:0},{className:"params",begin:/\(/, +end:/\)/,keywords:u,relevance:0,contains:[n,t.C_BLOCK_COMMENT_MODE,s,o,i,{ +begin:/\(/,end:/\)/,keywords:u,relevance:0, +contains:["self",n,t.C_BLOCK_COMMENT_MODE,s,o,i]}] +},i,n,t.C_BLOCK_COMMENT_MODE,c]};return{name:"C",aliases:["c","h"],keywords:u, +disableAutodetect:!0,illegal:"",keywords:u,contains:["self",i]},{begin:t.IDENT_RE+"::",keywords:u},{ +className:"class",beginKeywords:"enum class struct union",end:/[{;:<>=]/, +contains:[{beginKeywords:"final class struct"},t.TITLE_MODE]}]),exports:{ +preprocessor:c,strings:s,keywords:u}}}})()); +hljs.registerLanguage("coffeescript",(()=>{"use strict" +;const e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]) +;return r=>{const t={ +keyword:e.concat(["then","unless","until","loop","by","when","and","or","is","isnt","not"]).filter((i=["var","const","let","function","static"], 
+e=>!i.includes(e))),literal:n.concat(["yes","no","on","off"]), +built_in:a.concat(["npm","print"])};var i;const s="[A-Za-z$_][0-9A-Za-z$_]*",o={ +className:"subst",begin:/#\{/,end:/\}/,keywords:t +},c=[r.BINARY_NUMBER_MODE,r.inherit(r.C_NUMBER_MODE,{starts:{end:"(\\s*/)?", +relevance:0}}),{className:"string",variants:[{begin:/'''/,end:/'''/, +contains:[r.BACKSLASH_ESCAPE]},{begin:/'/,end:/'/,contains:[r.BACKSLASH_ESCAPE] +},{begin:/"""/,end:/"""/,contains:[r.BACKSLASH_ESCAPE,o]},{begin:/"/,end:/"/, +contains:[r.BACKSLASH_ESCAPE,o]}]},{className:"regexp",variants:[{begin:"///", +end:"///",contains:[o,r.HASH_COMMENT_MODE]},{begin:"//[gim]{0,3}(?=\\W)", +relevance:0},{begin:/\/(?![ *]).*?(?![\\]).\/[gim]{0,3}(?=\W)/}]},{begin:"@"+s +},{subLanguage:"javascript",excludeBegin:!0,excludeEnd:!0,variants:[{ +begin:"```",end:"```"},{begin:"`",end:"`"}]}];o.contains=c +;const l=r.inherit(r.TITLE_MODE,{begin:s}),d="(\\(.*\\)\\s*)?\\B[-=]>",g={ +className:"params",begin:"\\([^\\(]",returnBegin:!0,contains:[{begin:/\(/, +end:/\)/,keywords:t,contains:["self"].concat(c)}]};return{name:"CoffeeScript", +aliases:["coffee","cson","iced"],keywords:t,illegal:/\/\*/, +contains:c.concat([r.COMMENT("###","###"),r.HASH_COMMENT_MODE,{ +className:"function",begin:"^\\s*"+s+"\\s*=\\s*"+d,end:"[-=]>",returnBegin:!0, +contains:[l,g]},{begin:/[:\(,=]\s*/,relevance:0,contains:[{className:"function", +begin:d,end:"[-=]>",returnBegin:!0,contains:[g]}]},{className:"class", +beginKeywords:"class",end:"$",illegal:/[:="\[\]]/,contains:[{ +beginKeywords:"extends",endsWithParent:!0,illegal:/[:="\[\]]/,contains:[l]},l] +},{begin:s+":",end:":",returnBegin:!0,returnEnd:!0,relevance:0}])}}})()); +hljs.registerLanguage("cpp",(()=>{"use strict";function e(e){ +return((...e)=>e.map((e=>(e=>e?"string"==typeof e?e:e.source:null)(e))).join(""))("(",e,")?") +}return t=>{const n=t.COMMENT("//","$",{contains:[{begin:/\\\n/}] +}),r="[a-zA-Z_]\\w*::",a="(decltype\\(auto\\)|"+e(r)+"[a-zA-Z_]\\w*"+e("<[^<>]+>")+")",i={ 
+className:"keyword",begin:"\\b[a-z\\d_]*_t\\b"},s={className:"string", +variants:[{begin:'(u8?|U|L)?"',end:'"',illegal:"\\n", +contains:[t.BACKSLASH_ESCAPE]},{ +begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)", +end:"'",illegal:"."},t.END_SAME_AS_BEGIN({ +begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\(/,end:/\)([^()\\ ]{0,16})"/})]},o={ +className:"number",variants:[{begin:"\\b(0b[01']+)"},{ +begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)((ll|LL|l|L)(u|U)?|(u|U)(ll|LL|l|L)?|f|F|b|B)" +},{ +begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)" +}],relevance:0},c={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{ +"meta-keyword":"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include" +},contains:[{begin:/\\\n/,relevance:0},t.inherit(s,{className:"meta-string"}),{ +className:"meta-string",begin:/<.*?>/,end:/$/,illegal:"\\n" +},n,t.C_BLOCK_COMMENT_MODE]},l={className:"title",begin:e(r)+t.IDENT_RE, +relevance:0},d=e(r)+t.IDENT_RE+"\\s*\\(",u={ +keyword:"int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using asm case typeid wchar_t short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignas alignof constexpr consteval constinit decltype concept co_await co_return co_yield requires noexcept static_assert thread_local restrict final override atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq", +built_in:"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream 
ostringstream auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary", +literal:"true false nullptr NULL"},m=[c,i,n,t.C_BLOCK_COMMENT_MODE,o,s],p={ +variants:[{begin:/=/,end:/;/},{begin:/\(/,end:/\)/},{ +beginKeywords:"new throw return else",end:/;/}],keywords:u,contains:m.concat([{ +begin:/\(/,end:/\)/,keywords:u,contains:m.concat(["self"]),relevance:0}]), +relevance:0},_={className:"function",begin:"("+a+"[\\*&\\s]+)+"+d, +returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:u,illegal:/[^\w\s\*&:<>.]/, +contains:[{begin:"decltype\\(auto\\)",keywords:u,relevance:0},{begin:d, +returnBegin:!0,contains:[l],relevance:0},{className:"params",begin:/\(/, +end:/\)/,keywords:u,relevance:0,contains:[n,t.C_BLOCK_COMMENT_MODE,s,o,i,{ +begin:/\(/,end:/\)/,keywords:u,relevance:0, +contains:["self",n,t.C_BLOCK_COMMENT_MODE,s,o,i]}] +},i,n,t.C_BLOCK_COMMENT_MODE,c]};return{name:"C++", +aliases:["cc","c++","h++","hpp","hh","hxx","cxx"],keywords:u,illegal:"",keywords:u,contains:["self",i]},{begin:t.IDENT_RE+"::",keywords:u},{ +className:"class",beginKeywords:"enum class struct union",end:/[{;:<>=]/, +contains:[{beginKeywords:"final class struct"},t.TITLE_MODE]}]),exports:{ +preprocessor:c,strings:s,keywords:u}}}})()); +hljs.registerLanguage("csharp",(()=>{"use strict";return e=>{var n={ 
+keyword:["abstract","as","base","break","case","class","const","continue","do","else","event","explicit","extern","finally","fixed","for","foreach","goto","if","implicit","in","interface","internal","is","lock","namespace","new","operator","out","override","params","private","protected","public","readonly","record","ref","return","sealed","sizeof","stackalloc","static","struct","switch","this","throw","try","typeof","unchecked","unsafe","using","virtual","void","volatile","while"].concat(["add","alias","and","ascending","async","await","by","descending","equals","from","get","global","group","init","into","join","let","nameof","not","notnull","on","or","orderby","partial","remove","select","set","unmanaged","value|0","var","when","where","with","yield"]), +built_in:["bool","byte","char","decimal","delegate","double","dynamic","enum","float","int","long","nint","nuint","object","sbyte","short","string","ulong","unit","ushort"], +literal:["default","false","null","true"]},a=e.inherit(e.TITLE_MODE,{ +begin:"[a-zA-Z](\\.?\\w)*"}),i={className:"number",variants:[{ +begin:"\\b(0b[01']+)"},{ +begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{ +begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)" +}],relevance:0},s={className:"string",begin:'@"',end:'"',contains:[{begin:'""'}] +},t=e.inherit(s,{illegal:/\n/}),r={className:"subst",begin:/\{/,end:/\}/, +keywords:n},l=e.inherit(r,{illegal:/\n/}),c={className:"string",begin:/\$"/, +end:'"',illegal:/\n/,contains:[{begin:/\{\{/},{begin:/\}\}/ +},e.BACKSLASH_ESCAPE,l]},o={className:"string",begin:/\$@"/,end:'"',contains:[{ +begin:/\{\{/},{begin:/\}\}/},{begin:'""'},r]},d=e.inherit(o,{illegal:/\n/, +contains:[{begin:/\{\{/},{begin:/\}\}/},{begin:'""'},l]}) +;r.contains=[o,c,s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,i,e.C_BLOCK_COMMENT_MODE], +l.contains=[d,c,t,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,i,e.inherit(e.C_BLOCK_COMMENT_MODE,{ +illegal:/\n/})];var 
g={variants:[o,c,s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE] +},E={begin:"<",end:">",contains:[{beginKeywords:"in out"},a] +},_=e.IDENT_RE+"(<"+e.IDENT_RE+"(\\s*,\\s*"+e.IDENT_RE+")*>)?(\\[\\])?",b={ +begin:"@"+e.IDENT_RE,relevance:0};return{name:"C#",aliases:["cs","c#"], +keywords:n,illegal:/::/,contains:[e.COMMENT("///","$",{returnBegin:!0, +contains:[{className:"doctag",variants:[{begin:"///",relevance:0},{ +begin:"\x3c!--|--\x3e"},{begin:""}]}] +}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"meta",begin:"#", +end:"$",keywords:{ +"meta-keyword":"if else elif endif define undef warning error line region endregion pragma checksum" +}},g,i,{beginKeywords:"class interface",relevance:0,end:/[{;=]/, +illegal:/[^\s:,]/,contains:[{beginKeywords:"where class" +},a,E,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{beginKeywords:"namespace", +relevance:0,end:/[{;=]/,illegal:/[^\s:]/, +contains:[a,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{ +beginKeywords:"record",relevance:0,end:/[{;=]/,illegal:/[^\s:]/, +contains:[a,E,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"meta", +begin:"^\\s*\\[",excludeBegin:!0,end:"\\]",excludeEnd:!0,contains:[{ +className:"meta-string",begin:/"/,end:/"/}]},{ +beginKeywords:"new return throw await else",relevance:0},{className:"function", +begin:"("+_+"\\s+)+"+e.IDENT_RE+"\\s*(<.+>\\s*)?\\(",returnBegin:!0, +end:/\s*[{;=]/,excludeEnd:!0,keywords:n,contains:[{ +beginKeywords:"public private protected static internal protected abstract async extern override unsafe virtual new sealed partial", +relevance:0},{begin:e.IDENT_RE+"\\s*(<.+>\\s*)?\\(",returnBegin:!0, +contains:[e.TITLE_MODE,E],relevance:0},{className:"params",begin:/\(/,end:/\)/, +excludeBegin:!0,excludeEnd:!0,keywords:n,relevance:0, +contains:[g,i,e.C_BLOCK_COMMENT_MODE] +},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},b]}}})()); +hljs.registerLanguage("css",(()=>{"use strict" +;const 
e=["a","abbr","address","article","aside","audio","b","blockquote","body","button","canvas","caption","cite","code","dd","del","details","dfn","div","dl","dt","em","fieldset","figcaption","figure","footer","form","h1","h2","h3","h4","h5","h6","header","hgroup","html","i","iframe","img","input","ins","kbd","label","legend","li","main","mark","menu","nav","object","ol","p","q","quote","samp","section","span","strong","summary","sup","table","tbody","td","textarea","tfoot","th","thead","time","tr","ul","var","video"],t=["any-hover","any-pointer","aspect-ratio","color","color-gamut","color-index","device-aspect-ratio","device-height","device-width","display-mode","forced-colors","grid","height","hover","inverted-colors","monochrome","orientation","overflow-block","overflow-inline","pointer","prefers-color-scheme","prefers-contrast","prefers-reduced-motion","prefers-reduced-transparency","resolution","scan","scripting","update","width","min-width","max-width","min-height","max-height"],i=["active","any-link","blank","checked","current","default","defined","dir","disabled","drop","empty","enabled","first","first-child","first-of-type","fullscreen","future","focus","focus-visible","focus-within","has","host","host-context","hover","indeterminate","in-range","invalid","is","lang","last-child","last-of-type","left","link","local-link","not","nth-child","nth-col","nth-last-child","nth-last-col","nth-last-of-type","nth-of-type","only-child","only-of-type","optional","out-of-range","past","placeholder-shown","read-only","read-write","required","right","root","scope","target","target-within","user-invalid","valid","visited","where"],o=["after","backdrop","before","cue","cue-region","first-letter","first-line","grammar-error","marker","part","placeholder","selection","slotted","spelling-error"],r=["align-content","align-items","align-self","animation","animation-delay","animation-direction","animation-duration","animation-fill-mode","animation-iteration-count","animation-name","a
nimation-play-state","animation-timing-function","auto","backface-visibility","background","background-attachment","background-clip","background-color","background-image","background-origin","background-position","background-repeat","background-size","border","border-bottom","border-bottom-color","border-bottom-left-radius","border-bottom-right-radius","border-bottom-style","border-bottom-width","border-collapse","border-color","border-image","border-image-outset","border-image-repeat","border-image-slice","border-image-source","border-image-width","border-left","border-left-color","border-left-style","border-left-width","border-radius","border-right","border-right-color","border-right-style","border-right-width","border-spacing","border-style","border-top","border-top-color","border-top-left-radius","border-top-right-radius","border-top-style","border-top-width","border-width","bottom","box-decoration-break","box-shadow","box-sizing","break-after","break-before","break-inside","caption-side","clear","clip","clip-path","color","column-count","column-fill","column-gap","column-rule","column-rule-color","column-rule-style","column-rule-width","column-span","column-width","columns","content","counter-increment","counter-reset","cursor","direction","display","empty-cells","filter","flex","flex-basis","flex-direction","flex-flow","flex-grow","flex-shrink","flex-wrap","float","font","font-display","font-family","font-feature-settings","font-kerning","font-language-override","font-size","font-size-adjust","font-stretch","font-style","font-variant","font-variant-ligatures","font-variation-settings","font-weight","height","hyphens","icon","image-orientation","image-rendering","image-resolution","ime-mode","inherit","initial","justify-content","left","letter-spacing","line-height","list-style","list-style-image","list-style-position","list-style-type","margin","margin-bottom","margin-left","margin-right","margin-top","marks","mask","max-height","max-width","min-height","min-w
idth","nav-down","nav-index","nav-left","nav-right","nav-up","none","normal","object-fit","object-position","opacity","order","orphans","outline","outline-color","outline-offset","outline-style","outline-width","overflow","overflow-wrap","overflow-x","overflow-y","padding","padding-bottom","padding-left","padding-right","padding-top","page-break-after","page-break-before","page-break-inside","perspective","perspective-origin","pointer-events","position","quotes","resize","right","src","tab-size","table-layout","text-align","text-align-last","text-decoration","text-decoration-color","text-decoration-line","text-decoration-style","text-indent","text-overflow","text-rendering","text-shadow","text-transform","text-underline-position","top","transform","transform-origin","transform-style","transition","transition-delay","transition-duration","transition-property","transition-timing-function","unicode-bidi","vertical-align","visibility","white-space","widows","width","word-break","word-spacing","word-wrap","z-index"].reverse() +;return n=>{const a=(e=>({IMPORTANT:{className:"meta",begin:"!important"}, +HEXCOLOR:{className:"number",begin:"#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})"}, +ATTRIBUTE_SELECTOR_MODE:{className:"selector-attr",begin:/\[/,end:/\]/, +illegal:"$",contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]} +}))(n),l=[n.APOS_STRING_MODE,n.QUOTE_STRING_MODE];return{name:"CSS", +case_insensitive:!0,illegal:/[=|'\$]/,keywords:{keyframePosition:"from to"}, +classNameAliases:{keyframePosition:"selector-tag"}, +contains:[n.C_BLOCK_COMMENT_MODE,{begin:/-(webkit|moz|ms|o)-(?=[a-z])/ +},n.CSS_NUMBER_MODE,{className:"selector-id",begin:/#[A-Za-z0-9_-]+/,relevance:0 +},{className:"selector-class",begin:"\\.[a-zA-Z-][a-zA-Z0-9_-]*",relevance:0 +},a.ATTRIBUTE_SELECTOR_MODE,{className:"selector-pseudo",variants:[{ +begin:":("+i.join("|")+")"},{begin:"::("+o.join("|")+")"}]},{ +className:"attribute",begin:"\\b("+r.join("|")+")\\b"},{begin:":",end:"[;}]", 
+contains:[a.HEXCOLOR,a.IMPORTANT,n.CSS_NUMBER_MODE,...l,{ +begin:/(url|data-uri)\(/,end:/\)/,relevance:0,keywords:{built_in:"url data-uri" +},contains:[{className:"string",begin:/[^)]/,endsWithParent:!0,excludeEnd:!0}] +},{className:"built_in",begin:/[\w-]+(?=\()/}]},{ +begin:(s=/@/,((...e)=>e.map((e=>(e=>e?"string"==typeof e?e:e.source:null)(e))).join(""))("(?=",s,")")), +end:"[{;]",relevance:0,illegal:/:/,contains:[{className:"keyword", +begin:/@-?\w[\w]*(-\w+)*/},{begin:/\s/,endsWithParent:!0,excludeEnd:!0, +relevance:0,keywords:{$pattern:/[a-z-]+/,keyword:"and or not only", +attribute:t.join(" ")},contains:[{begin:/[a-z-]+(?=:)/,className:"attribute" +},...l,n.CSS_NUMBER_MODE]}]},{className:"selector-tag", +begin:"\\b("+e.join("|")+")\\b"}]};var s}})()); +hljs.registerLanguage("diff",(()=>{"use strict";return e=>({name:"Diff", +aliases:["patch"],contains:[{className:"meta",relevance:10,variants:[{ +begin:/^@@ +-\d+,\d+ +\+\d+,\d+ +@@/},{begin:/^\*\*\* +\d+,\d+ +\*\*\*\*$/},{ +begin:/^--- +\d+,\d+ +----$/}]},{className:"comment",variants:[{begin:/Index: /, +end:/$/},{begin:/^index/,end:/$/},{begin:/={3,}/,end:/$/},{begin:/^-{3}/,end:/$/ +},{begin:/^\*{3} /,end:/$/},{begin:/^\+{3}/,end:/$/},{begin:/^\*{15}$/},{ +begin:/^diff --git/,end:/$/}]},{className:"addition",begin:/^\+/,end:/$/},{ +className:"deletion",begin:/^-/,end:/$/},{className:"addition",begin:/^!/, +end:/$/}]})})()); +hljs.registerLanguage("go",(()=>{"use strict";return e=>{const n={ +keyword:"break default func interface select case map struct chan else goto package switch const fallthrough if range type continue for import return var go defer bool byte complex64 complex128 float32 float64 int8 int16 int32 int64 string uint8 uint16 uint32 uint64 int uint uintptr rune", +literal:"true false iota nil", +built_in:"append cap close complex copy imag len make new panic print println real recover delete" +};return{name:"Go",aliases:["golang"],keywords:n,illegal:"{"use strict";function e(...e){ +return 
e.map((e=>{return(n=e)?"string"==typeof n?n:n.source:null;var n +})).join("")}return n=>{const a="HTTP/(2|1\\.[01])",s=[{className:"attribute", +begin:e("^",/[A-Za-z][A-Za-z0-9-]*/,"(?=\\:\\s)"),starts:{contains:[{ +className:"punctuation",begin:/: /,relevance:0,starts:{end:"$",relevance:0}}]} +},{begin:"\\n\\n",starts:{subLanguage:[],endsWithParent:!0}}];return{ +name:"HTTP",aliases:["https"],illegal:/\S/,contains:[{begin:"^(?="+a+" \\d{3})", +end:/$/,contains:[{className:"meta",begin:a},{className:"number", +begin:"\\b\\d{3}\\b"}],starts:{end:/\b\B/,illegal:/\S/,contains:s}},{ +begin:"(?=^[A-Z]+ (.*?) "+a+"$)",end:/$/,contains:[{className:"string", +begin:" ",end:" ",excludeBegin:!0,excludeEnd:!0},{className:"meta",begin:a},{ +className:"keyword",begin:"[A-Z]+"}],starts:{end:/\b\B/,illegal:/\S/,contains:s} +}]}}})()); +hljs.registerLanguage("ini",(()=>{"use strict";function e(e){ +return e?"string"==typeof e?e:e.source:null}function n(...n){ +return n.map((n=>e(n))).join("")}return s=>{const a={className:"number", +relevance:0,variants:[{begin:/([+-]+)?[\d]+_[\d_]+/},{begin:s.NUMBER_RE}] +},i=s.COMMENT();i.variants=[{begin:/;/,end:/$/},{begin:/#/,end:/$/}];const t={ +className:"variable",variants:[{begin:/\$[\w\d"][\w\d_]*/},{begin:/\$\{(.*?)\}/ +}]},r={className:"literal",begin:/\bon|off|true|false|yes|no\b/},l={ +className:"string",contains:[s.BACKSLASH_ESCAPE],variants:[{begin:"'''", +end:"'''",relevance:10},{begin:'"""',end:'"""',relevance:10},{begin:'"',end:'"' +},{begin:"'",end:"'"}]},c={begin:/\[/,end:/\]/,contains:[i,r,t,l,a,"self"], +relevance:0 +},g="("+[/[A-Za-z0-9_-]+/,/"(\\"|[^"])*"/,/'[^']*'/].map((n=>e(n))).join("|")+")" +;return{name:"TOML, also INI",aliases:["toml"],case_insensitive:!0,illegal:/\S/, +contains:[i,{className:"section",begin:/\[+/,end:/\]+/},{ +begin:n(g,"(\\s*\\.\\s*",g,")*",n("(?=",/\s*=\s*[^#\s]/,")")),className:"attr", +starts:{end:/$/,contains:[i,c,r,t,l,a]}}]}}})()); +hljs.registerLanguage("java",(()=>{"use strict" +;var 
e="\\.([0-9](_*[0-9])*)",n="[0-9a-fA-F](_*[0-9a-fA-F])*",a={ +className:"number",variants:[{ +begin:`(\\b([0-9](_*[0-9])*)((${e})|\\.)?|(${e}))[eE][+-]?([0-9](_*[0-9])*)[fFdD]?\\b` +},{begin:`\\b([0-9](_*[0-9])*)((${e})[fFdD]?\\b|\\.([fFdD]\\b)?)`},{ +begin:`(${e})[fFdD]?\\b`},{begin:"\\b([0-9](_*[0-9])*)[fFdD]\\b"},{ +begin:`\\b0[xX]((${n})\\.?|(${n})?\\.(${n}))[pP][+-]?([0-9](_*[0-9])*)[fFdD]?\\b` +},{begin:"\\b(0|[1-9](_*[0-9])*)[lL]?\\b"},{begin:`\\b0[xX](${n})[lL]?\\b`},{ +begin:"\\b0(_*[0-7])*[lL]?\\b"},{begin:"\\b0[bB][01](_*[01])*[lL]?\\b"}], +relevance:0};return e=>{ +var n="false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports do",s={ +className:"meta",begin:"@[\xc0-\u02b8a-zA-Z_$][\xc0-\u02b8a-zA-Z_$0-9]*", +contains:[{begin:/\(/,end:/\)/,contains:["self"]}]};const r=a;return{ +name:"Java",aliases:["jsp"],keywords:n,illegal:/<\/|#/, +contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/, +relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),{ +begin:/import java\.[a-z]+\./,keywords:"import",relevance:2 +},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{ +className:"class",beginKeywords:"class interface enum",end:/[{;=]/, +excludeEnd:!0,relevance:1,keywords:"class interface enum",illegal:/[:"\[\]]/, +contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{ +beginKeywords:"new throw return else",relevance:0},{className:"class", +begin:"record\\s+"+e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,excludeEnd:!0, +end:/[{;=]/,keywords:n,contains:[{beginKeywords:"record"},{ +begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0, 
+contains:[e.UNDERSCORE_TITLE_MODE]},{className:"params",begin:/\(/,end:/\)/, +keywords:n,relevance:0,contains:[e.C_BLOCK_COMMENT_MODE] +},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"function", +begin:"([\xc0-\u02b8a-zA-Z_$][\xc0-\u02b8a-zA-Z_$0-9]*(<[\xc0-\u02b8a-zA-Z_$][\xc0-\u02b8a-zA-Z_$0-9]*(\\s*,\\s*[\xc0-\u02b8a-zA-Z_$][\xc0-\u02b8a-zA-Z_$0-9]*)*>)?\\s+)+"+e.UNDERSCORE_IDENT_RE+"\\s*\\(", +returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:n,contains:[{ +begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0, +contains:[e.UNDERSCORE_TITLE_MODE]},{className:"params",begin:/\(/,end:/\)/, +keywords:n,relevance:0, +contains:[s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,r,e.C_BLOCK_COMMENT_MODE] +},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},r,s]}}})()); +hljs.registerLanguage("javascript",(()=>{"use strict" +;const e="[A-Za-z$_][0-9A-Za-z$_]*",n=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],a=["true","false","null","undefined","NaN","Infinity"],s=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]) +;function 
r(e){return t("(?=",e,")")}function t(...e){return e.map((e=>{ +return(n=e)?"string"==typeof n?n:n.source:null;var n})).join("")}return i=>{ +const c=e,o={begin:/<[A-Za-z0-9\\._:-]+/,end:/\/[A-Za-z0-9\\._:-]+>|\/>/, +isTrulyOpeningTag:(e,n)=>{const a=e[0].length+e.index,s=e.input[a] +;"<"!==s?">"===s&&(((e,{after:n})=>{const a="", +returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{ +begin:i.UNDERSCORE_IDENT_RE,relevance:0},{className:null,begin:/\(\s*\)/,skip:!0 +},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:l,contains:A}]}] +},{begin:/,/,relevance:0},{className:"",begin:/\s/,end:/\s*/,skip:!0},{ +variants:[{begin:"<>",end:""},{begin:o.begin,"on:begin":o.isTrulyOpeningTag, +end:o.end}],subLanguage:"xml",contains:[{begin:o.begin,end:o.end,skip:!0, +contains:["self"]}]}],relevance:0},{className:"function", +beginKeywords:"function",end:/[{;]/,excludeEnd:!0,keywords:l, +contains:["self",i.inherit(i.TITLE_MODE,{begin:c}),p],illegal:/%/},{ +beginKeywords:"while if switch catch for"},{className:"function", +begin:i.UNDERSCORE_IDENT_RE+"\\([^()]*(\\([^()]*(\\([^()]*\\)[^()]*)*\\)[^()]*)*\\)\\s*\\{", +returnBegin:!0,contains:[p,i.inherit(i.TITLE_MODE,{begin:c})]},{variants:[{ +begin:"\\."+c},{begin:"\\$"+c}],relevance:0},{className:"class", +beginKeywords:"class",end:/[{;=]/,excludeEnd:!0,illegal:/[:"[\]]/,contains:[{ +beginKeywords:"extends"},i.UNDERSCORE_TITLE_MODE]},{begin:/\b(?=constructor)/, +end:/[{;]/,excludeEnd:!0,contains:[i.inherit(i.TITLE_MODE,{begin:c}),"self",p] +},{begin:"(get|set)\\s+(?="+c+"\\()",end:/\{/,keywords:"get set", +contains:[i.inherit(i.TITLE_MODE,{begin:c}),{begin:/\(\)/},p]},{begin:/\$[(.]/}] +}}})()); +hljs.registerLanguage("json",(()=>{"use strict";return n=>{const e={ +literal:"true false null" +},i=[n.C_LINE_COMMENT_MODE,n.C_BLOCK_COMMENT_MODE],a=[n.QUOTE_STRING_MODE,n.C_NUMBER_MODE],l={ +end:",",endsWithParent:!0,excludeEnd:!0,contains:a,keywords:e},t={begin:/\{/, 
+end:/\}/,contains:[{className:"attr",begin:/"/,end:/"/, +contains:[n.BACKSLASH_ESCAPE],illegal:"\\n"},n.inherit(l,{begin:/:/ +})].concat(i),illegal:"\\S"},s={begin:"\\[",end:"\\]",contains:[n.inherit(l)], +illegal:"\\S"};return a.push(t,s),i.forEach((n=>{a.push(n)})),{name:"JSON", +contains:a,keywords:e,illegal:"\\S"}}})()); +hljs.registerLanguage("kotlin",(()=>{"use strict" +;var e="\\.([0-9](_*[0-9])*)",n="[0-9a-fA-F](_*[0-9a-fA-F])*",a={ +className:"number",variants:[{ +begin:`(\\b([0-9](_*[0-9])*)((${e})|\\.)?|(${e}))[eE][+-]?([0-9](_*[0-9])*)[fFdD]?\\b` +},{begin:`\\b([0-9](_*[0-9])*)((${e})[fFdD]?\\b|\\.([fFdD]\\b)?)`},{ +begin:`(${e})[fFdD]?\\b`},{begin:"\\b([0-9](_*[0-9])*)[fFdD]\\b"},{ +begin:`\\b0[xX]((${n})\\.?|(${n})?\\.(${n}))[pP][+-]?([0-9](_*[0-9])*)[fFdD]?\\b` +},{begin:"\\b(0|[1-9](_*[0-9])*)[lL]?\\b"},{begin:`\\b0[xX](${n})[lL]?\\b`},{ +begin:"\\b0(_*[0-7])*[lL]?\\b"},{begin:"\\b0[bB][01](_*[01])*[lL]?\\b"}], +relevance:0};return e=>{const n={ +keyword:"abstract as val var vararg get set class object open private protected public noinline crossinline dynamic final enum if else do while for when throw try catch finally import package is in fun override companion reified inline lateinit init interface annotation data sealed internal infix operator out by constructor super tailrec where const inner suspend typealias external expect actual", +built_in:"Byte Short Char Int Long Boolean Float Double Void Unit Nothing", +literal:"true false null"},i={className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"@" +},s={className:"subst",begin:/\$\{/,end:/\}/,contains:[e.C_NUMBER_MODE]},t={ +className:"variable",begin:"\\$"+e.UNDERSCORE_IDENT_RE},r={className:"string", +variants:[{begin:'"""',end:'"""(?=[^"])',contains:[t,s]},{begin:"'",end:"'", +illegal:/\n/,contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"',illegal:/\n/, +contains:[e.BACKSLASH_ESCAPE,t,s]}]};s.contains.push(r);const l={ +className:"meta", 
+begin:"@(?:file|property|field|get|set|receiver|param|setparam|delegate)\\s*:(?:\\s*"+e.UNDERSCORE_IDENT_RE+")?" +},c={className:"meta",begin:"@"+e.UNDERSCORE_IDENT_RE,contains:[{begin:/\(/, +end:/\)/,contains:[e.inherit(r,{className:"meta-string"})]}] +},o=a,b=e.COMMENT("/\\*","\\*/",{contains:[e.C_BLOCK_COMMENT_MODE]}),E={ +variants:[{className:"type",begin:e.UNDERSCORE_IDENT_RE},{begin:/\(/,end:/\)/, +contains:[]}]},d=E;return d.variants[1].contains=[E],E.variants[1].contains=[d], +{name:"Kotlin",aliases:["kt"],keywords:n,contains:[e.COMMENT("/\\*\\*","\\*/",{ +relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+"}] +}),e.C_LINE_COMMENT_MODE,b,{className:"keyword", +begin:/\b(break|continue|return|this)\b/,starts:{contains:[{className:"symbol", +begin:/@\w+/}]}},i,l,c,{className:"function",beginKeywords:"fun",end:"[(]|$", +returnBegin:!0,excludeEnd:!0,keywords:n,relevance:5,contains:[{ +begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0, +contains:[e.UNDERSCORE_TITLE_MODE]},{className:"type",begin://, +keywords:"reified",relevance:0},{className:"params",begin:/\(/,end:/\)/, +endsParent:!0,keywords:n,relevance:0,contains:[{begin:/:/,end:/[=,\/]/, +endsWithParent:!0,contains:[E,e.C_LINE_COMMENT_MODE,b],relevance:0 +},e.C_LINE_COMMENT_MODE,b,l,c,r,e.C_NUMBER_MODE]},b]},{className:"class", +beginKeywords:"class interface trait",end:/[:\{(]|$/,excludeEnd:!0, +illegal:"extends implements",contains:[{ +beginKeywords:"public protected internal private constructor" +},e.UNDERSCORE_TITLE_MODE,{className:"type",begin://,excludeBegin:!0, +excludeEnd:!0,relevance:0},{className:"type",begin:/[,:]\s*/,end:/[<\(,]|$/, +excludeBegin:!0,returnEnd:!0},l,c]},r,{className:"meta",begin:"^#!/usr/bin/env", +end:"$",illegal:"\n"},o]}}})()); +hljs.registerLanguage("less",(()=>{"use strict" +;const 
e=["a","abbr","address","article","aside","audio","b","blockquote","body","button","canvas","caption","cite","code","dd","del","details","dfn","div","dl","dt","em","fieldset","figcaption","figure","footer","form","h1","h2","h3","h4","h5","h6","header","hgroup","html","i","iframe","img","input","ins","kbd","label","legend","li","main","mark","menu","nav","object","ol","p","q","quote","samp","section","span","strong","summary","sup","table","tbody","td","textarea","tfoot","th","thead","time","tr","ul","var","video"],t=["any-hover","any-pointer","aspect-ratio","color","color-gamut","color-index","device-aspect-ratio","device-height","device-width","display-mode","forced-colors","grid","height","hover","inverted-colors","monochrome","orientation","overflow-block","overflow-inline","pointer","prefers-color-scheme","prefers-contrast","prefers-reduced-motion","prefers-reduced-transparency","resolution","scan","scripting","update","width","min-width","max-width","min-height","max-height"],i=["active","any-link","blank","checked","current","default","defined","dir","disabled","drop","empty","enabled","first","first-child","first-of-type","fullscreen","future","focus","focus-visible","focus-within","has","host","host-context","hover","indeterminate","in-range","invalid","is","lang","last-child","last-of-type","left","link","local-link","not","nth-child","nth-col","nth-last-child","nth-last-col","nth-last-of-type","nth-of-type","only-child","only-of-type","optional","out-of-range","past","placeholder-shown","read-only","read-write","required","right","root","scope","target","target-within","user-invalid","valid","visited","where"],o=["after","backdrop","before","cue","cue-region","first-letter","first-line","grammar-error","marker","part","placeholder","selection","slotted","spelling-error"],n=["align-content","align-items","align-self","animation","animation-delay","animation-direction","animation-duration","animation-fill-mode","animation-iteration-count","animation-name","a
nimation-play-state","animation-timing-function","auto","backface-visibility","background","background-attachment","background-clip","background-color","background-image","background-origin","background-position","background-repeat","background-size","border","border-bottom","border-bottom-color","border-bottom-left-radius","border-bottom-right-radius","border-bottom-style","border-bottom-width","border-collapse","border-color","border-image","border-image-outset","border-image-repeat","border-image-slice","border-image-source","border-image-width","border-left","border-left-color","border-left-style","border-left-width","border-radius","border-right","border-right-color","border-right-style","border-right-width","border-spacing","border-style","border-top","border-top-color","border-top-left-radius","border-top-right-radius","border-top-style","border-top-width","border-width","bottom","box-decoration-break","box-shadow","box-sizing","break-after","break-before","break-inside","caption-side","clear","clip","clip-path","color","column-count","column-fill","column-gap","column-rule","column-rule-color","column-rule-style","column-rule-width","column-span","column-width","columns","content","counter-increment","counter-reset","cursor","direction","display","empty-cells","filter","flex","flex-basis","flex-direction","flex-flow","flex-grow","flex-shrink","flex-wrap","float","font","font-display","font-family","font-feature-settings","font-kerning","font-language-override","font-size","font-size-adjust","font-stretch","font-style","font-variant","font-variant-ligatures","font-variation-settings","font-weight","height","hyphens","icon","image-orientation","image-rendering","image-resolution","ime-mode","inherit","initial","justify-content","left","letter-spacing","line-height","list-style","list-style-image","list-style-position","list-style-type","margin","margin-bottom","margin-left","margin-right","margin-top","marks","mask","max-height","max-width","min-height","min-w
idth","nav-down","nav-index","nav-left","nav-right","nav-up","none","normal","object-fit","object-position","opacity","order","orphans","outline","outline-color","outline-offset","outline-style","outline-width","overflow","overflow-wrap","overflow-x","overflow-y","padding","padding-bottom","padding-left","padding-right","padding-top","page-break-after","page-break-before","page-break-inside","perspective","perspective-origin","pointer-events","position","quotes","resize","right","src","tab-size","table-layout","text-align","text-align-last","text-decoration","text-decoration-color","text-decoration-line","text-decoration-style","text-indent","text-overflow","text-rendering","text-shadow","text-transform","text-underline-position","top","transform","transform-origin","transform-style","transition","transition-delay","transition-duration","transition-property","transition-timing-function","unicode-bidi","vertical-align","visibility","white-space","widows","width","word-break","word-spacing","word-wrap","z-index"].reverse(),r=i.concat(o) +;return a=>{const s=(e=>({IMPORTANT:{className:"meta",begin:"!important"}, +HEXCOLOR:{className:"number",begin:"#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})"}, +ATTRIBUTE_SELECTOR_MODE:{className:"selector-attr",begin:/\[/,end:/\]/, +illegal:"$",contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]} +}))(a),l=r,d="([\\w-]+|@\\{[\\w-]+\\})",c=[],g=[],b=e=>({className:"string", +begin:"~?"+e+".*?"+e}),m=(e,t,i)=>({className:e,begin:t,relevance:i}),u={ +$pattern:/[a-z-]+/,keyword:"and or not only",attribute:t.join(" ")},p={ +begin:"\\(",end:"\\)",contains:g,keywords:u,relevance:0} +;g.push(a.C_LINE_COMMENT_MODE,a.C_BLOCK_COMMENT_MODE,b("'"),b('"'),a.CSS_NUMBER_MODE,{ +begin:"(url|data-uri)\\(",starts:{className:"string",end:"[\\)\\n]", +excludeEnd:!0} +},s.HEXCOLOR,p,m("variable","@@?[\\w-]+",10),m("variable","@\\{[\\w-]+\\}"),m("built_in","~?`[^`]*?`"),{ +className:"attribute",begin:"[\\w-]+\\s*:",end:":",returnBegin:!0,excludeEnd:!0 
+},s.IMPORTANT);const f=g.concat({begin:/\{/,end:/\}/,contains:c}),h={ +beginKeywords:"when",endsWithParent:!0,contains:[{beginKeywords:"and not" +}].concat(g)},w={begin:d+"\\s*:",returnBegin:!0,end:/[;}]/,relevance:0, +contains:[{begin:/-(webkit|moz|ms|o)-/},{className:"attribute", +begin:"\\b("+n.join("|")+")\\b",end:/(?=:)/,starts:{endsWithParent:!0, +illegal:"[<=$]",relevance:0,contains:g}}]},v={className:"keyword", +begin:"@(import|media|charset|font-face|(-[a-z]+-)?keyframes|supports|document|namespace|page|viewport|host)\\b", +starts:{end:"[;{}]",keywords:u,returnEnd:!0,contains:g,relevance:0}},y={ +className:"variable",variants:[{begin:"@[\\w-]+\\s*:",relevance:15},{ +begin:"@[\\w-]+"}],starts:{end:"[;}]",returnEnd:!0,contains:f}},k={variants:[{ +begin:"[\\.#:&\\[>]",end:"[;{}]"},{begin:d,end:/\{/}],returnBegin:!0, +returnEnd:!0,illegal:"[<='$\"]",relevance:0, +contains:[a.C_LINE_COMMENT_MODE,a.C_BLOCK_COMMENT_MODE,h,m("keyword","all\\b"),m("variable","@\\{[\\w-]+\\}"),{ +begin:"\\b("+e.join("|")+")\\b",className:"selector-tag" +},m("selector-tag",d+"%?",0),m("selector-id","#"+d),m("selector-class","\\."+d,0),m("selector-tag","&",0),s.ATTRIBUTE_SELECTOR_MODE,{ +className:"selector-pseudo",begin:":("+i.join("|")+")"},{ +className:"selector-pseudo",begin:"::("+o.join("|")+")"},{begin:"\\(",end:"\\)", +contains:f},{begin:"!important"}]},E={begin:`[\\w-]+:(:)?(${l.join("|")})`, +returnBegin:!0,contains:[k]} +;return c.push(a.C_LINE_COMMENT_MODE,a.C_BLOCK_COMMENT_MODE,v,y,E,w,k),{ +name:"Less",case_insensitive:!0,illegal:"[=>'/<($\"]",contains:c}}})()); +hljs.registerLanguage("lua",(()=>{"use strict";return e=>{ +const t="\\[=*\\[",a="\\]=*\\]",n={begin:t,end:a,contains:["self"] +},o=[e.COMMENT("--(?!\\[=*\\[)","$"),e.COMMENT("--\\[=*\\[",a,{contains:[n], +relevance:10})];return{name:"Lua",keywords:{$pattern:e.UNDERSCORE_IDENT_RE, +literal:"true false nil", +keyword:"and break do else elseif end for goto if in local not or repeat return then until while", 
+built_in:"_G _ENV _VERSION __index __newindex __mode __call __metatable __tostring __len __gc __add __sub __mul __div __mod __pow __concat __unm __eq __lt __le assert collectgarbage dofile error getfenv getmetatable ipairs load loadfile loadstring module next pairs pcall print rawequal rawget rawset require select setfenv setmetatable tonumber tostring type unpack xpcall arg self coroutine resume yield status wrap create running debug getupvalue debug sethook getmetatable gethook setmetatable setlocal traceback setfenv getinfo setupvalue getlocal getregistry getfenv io lines write close flush open output type read stderr stdin input stdout popen tmpfile math log max acos huge ldexp pi cos tanh pow deg tan cosh sinh random randomseed frexp ceil floor rad abs sqrt modf asin min mod fmod log10 atan2 exp sin atan os exit setlocale date getenv difftime remove time clock tmpname rename execute package preload loadlib loaded loaders cpath config path seeall string sub upper len gfind rep find match char dump gmatch reverse byte format gsub lower table setn insert getn foreachi maxn foreach concat sort remove" +},contains:o.concat([{className:"function",beginKeywords:"function",end:"\\)", +contains:[e.inherit(e.TITLE_MODE,{ +begin:"([_a-zA-Z]\\w*\\.)*([_a-zA-Z]\\w*:)?[_a-zA-Z]\\w*"}),{className:"params", +begin:"\\(",endsWithParent:!0,contains:o}].concat(o) +},e.C_NUMBER_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"string", +begin:t,end:a,contains:[n],relevance:5}])}}})()); +hljs.registerLanguage("makefile",(()=>{"use strict";return e=>{const i={ +className:"variable",variants:[{begin:"\\$\\("+e.UNDERSCORE_IDENT_RE+"\\)", +contains:[e.BACKSLASH_ESCAPE]},{begin:/\$[@%{"use strict";function e(e){ +return e?"string"==typeof e?e:e.source:null}function n(e){return a("(?=",e,")")} +function a(...n){return n.map((n=>e(n))).join("")}function s(...n){ +return"("+n.map((n=>e(n))).join("|")+")"}return e=>{ +const 
t=a(/[A-Z_]/,a("(",/[A-Z0-9_.-]*:/,")?"),/[A-Z0-9_.-]*/),i={ +className:"symbol",begin:/&[a-z]+;|&#[0-9]+;|&#x[a-f0-9]+;/},r={begin:/\s/, +contains:[{className:"meta-keyword",begin:/#?[a-z_][a-z1-9_-]+/,illegal:/\n/}] +},c=e.inherit(r,{begin:/\(/,end:/\)/}),l=e.inherit(e.APOS_STRING_MODE,{ +className:"meta-string"}),g=e.inherit(e.QUOTE_STRING_MODE,{ +className:"meta-string"}),m={endsWithParent:!0,illegal:/`]+/}]}] +}]};return{name:"HTML, XML", +aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"], +case_insensitive:!0,contains:[{className:"meta",begin://, +relevance:10,contains:[r,g,l,c,{begin:/\[/,end:/\]/,contains:[{className:"meta", +begin://,contains:[r,c,g,l]}]}]},e.COMMENT(//,{ +relevance:10}),{begin://,relevance:10},i,{ +className:"meta",begin:/<\?xml/,end:/\?>/,relevance:10},{className:"tag", +begin:/)/,end:/>/,keywords:{name:"style"},contains:[m],starts:{ +end:/<\/style>/,returnEnd:!0,subLanguage:["css","xml"]}},{className:"tag", +begin:/)/,end:/>/,keywords:{name:"script"},contains:[m],starts:{ +end:/<\/script>/,returnEnd:!0,subLanguage:["javascript","handlebars","xml"]}},{ +className:"tag",begin:/<>|<\/>/},{className:"tag", +begin:a(//,/>/,/\s/)))),end:/\/?>/,contains:[{className:"name", +begin:t,relevance:0,starts:m}]},{className:"tag",begin:a(/<\//,n(a(t,/>/))), +contains:[{className:"name",begin:t,relevance:0},{begin:/>/,relevance:0}]}]}} +})()); +hljs.registerLanguage("markdown",(()=>{"use strict";function n(...n){ +return n.map((n=>{return(e=n)?"string"==typeof e?e:e.source:null;var e +})).join("")}return e=>{const a={begin:/<\/?[A-Za-z_]/,end:">", +subLanguage:"xml",relevance:0},i={variants:[{begin:/\[.+?\]\[.*?\]/,relevance:0 +},{begin:/\[.+?\]\(((data|javascript|mailto):|(?:http|ftp)s?:\/\/).*?\)/, +relevance:2},{begin:n(/\[.+?\]\(/,/[A-Za-z][A-Za-z0-9+.-]*/,/:\/\/.*?\)/), +relevance:2},{begin:/\[.+?\]\([./?&#].*?\)/,relevance:1},{ +begin:/\[.+?\]\(.*?\)/,relevance:0}],returnBegin:!0,contains:[{ 
+className:"string",relevance:0,begin:"\\[",end:"\\]",excludeBegin:!0, +returnEnd:!0},{className:"link",relevance:0,begin:"\\]\\(",end:"\\)", +excludeBegin:!0,excludeEnd:!0},{className:"symbol",relevance:0,begin:"\\]\\[", +end:"\\]",excludeBegin:!0,excludeEnd:!0}]},s={className:"strong",contains:[], +variants:[{begin:/_{2}/,end:/_{2}/},{begin:/\*{2}/,end:/\*{2}/}]},c={ +className:"emphasis",contains:[],variants:[{begin:/\*(?!\*)/,end:/\*/},{ +begin:/_(?!_)/,end:/_/,relevance:0}]};s.contains.push(c),c.contains.push(s) +;let t=[a,i] +;return s.contains=s.contains.concat(t),c.contains=c.contains.concat(t), +t=t.concat(s,c),{name:"Markdown",aliases:["md","mkdown","mkd"],contains:[{ +className:"section",variants:[{begin:"^#{1,6}",end:"$",contains:t},{ +begin:"(?=^.+?\\n[=-]{2,}$)",contains:[{begin:"^[=-]*$"},{begin:"^",end:"\\n", +contains:t}]}]},a,{className:"bullet",begin:"^[ \t]*([*+-]|(\\d+\\.))(?=\\s+)", +end:"\\s+",excludeEnd:!0},s,c,{className:"quote",begin:"^>\\s+",contains:t, +end:"$"},{className:"code",variants:[{begin:"(`{3,})[^`](.|\\n)*?\\1`*[ ]*"},{ +begin:"(~{3,})[^~](.|\\n)*?\\1~*[ ]*"},{begin:"```",end:"```+[ ]*$"},{ +begin:"~~~",end:"~~~+[ ]*$"},{begin:"`.+?`"},{begin:"(?=^( {4}|\\t))", +contains:[{begin:"^( {4}|\\t)",end:"(\\n)$"}],relevance:0}]},{ +begin:"^[-\\*]{3,}",end:"$"},i,{begin:/^\[[^\n]+\]:/,returnBegin:!0,contains:[{ +className:"symbol",begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0},{ +className:"link",begin:/:\s*/,end:/$/,excludeBegin:!0}]}]}}})()); +hljs.registerLanguage("nginx",(()=>{"use strict";return e=>{const n={ +className:"variable",variants:[{begin:/\$\d+/},{begin:/\$\{/,end:/\}/},{ +begin:/[$@]/+e.UNDERSCORE_IDENT_RE}]},a={endsWithParent:!0,keywords:{ +$pattern:"[a-z/_]+", +literal:"on off yes no true false none blocked debug info notice warn error crit select break last permanent redirect kqueue rtsig epoll poll /dev/poll" +},relevance:0,illegal:"=>",contains:[e.HASH_COMMENT_MODE,{className:"string", 
+contains:[e.BACKSLASH_ESCAPE,n],variants:[{begin:/"/,end:/"/},{begin:/'/,end:/'/ +}]},{begin:"([a-z]+):/",end:"\\s",endsWithParent:!0,excludeEnd:!0,contains:[n] +},{className:"regexp",contains:[e.BACKSLASH_ESCAPE,n],variants:[{begin:"\\s\\^", +end:"\\s|\\{|;",returnEnd:!0},{begin:"~\\*?\\s+",end:"\\s|\\{|;",returnEnd:!0},{ +begin:"\\*(\\.[a-z\\-]+)+"},{begin:"([a-z\\-]+\\.)+\\*"}]},{className:"number", +begin:"\\b\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?\\b"},{ +className:"number",begin:"\\b\\d+[kKmMgGdshdwy]*\\b",relevance:0},n]};return{ +name:"Nginx config",aliases:["nginxconf"],contains:[e.HASH_COMMENT_MODE,{ +begin:e.UNDERSCORE_IDENT_RE+"\\s+\\{",returnBegin:!0,end:/\{/,contains:[{ +className:"section",begin:e.UNDERSCORE_IDENT_RE}],relevance:0},{ +begin:e.UNDERSCORE_IDENT_RE+"\\s",end:";|\\{",returnBegin:!0,contains:[{ +className:"attribute",begin:e.UNDERSCORE_IDENT_RE,starts:a}],relevance:0}], +illegal:"[^\\s\\}]"}}})()); +hljs.registerLanguage("objectivec",(()=>{"use strict";return e=>{ +const n=/[a-zA-Z@][a-zA-Z0-9_]*/,_={$pattern:n, +keyword:"@interface @class @protocol @implementation"};return{ +name:"Objective-C",aliases:["mm","objc","obj-c","obj-c++","objective-c++"], +keywords:{$pattern:n, +keyword:"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required @encode @package @import @defs @compatibility_alias __bridge __bridge_transfer __bridge_retained __bridge_retain __covariant __contravariant __kindof _Nonnull _Nullable 
_Null_unspecified __FUNCTION__ __PRETTY_FUNCTION__ __attribute__ getter setter retain unsafe_unretained nonnull nullable null_unspecified null_resettable class instancetype NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE NS_REQUIRES_SUPER NS_RETURNS_INNER_POINTER NS_INLINE NS_AVAILABLE NS_DEPRECATED NS_ENUM NS_OPTIONS NS_SWIFT_UNAVAILABLE NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END NS_REFINED_FOR_SWIFT NS_SWIFT_NAME NS_SWIFT_NOTHROW NS_DURING NS_HANDLER NS_ENDHANDLER NS_VALUERETURN NS_VOIDRETURN", +literal:"false true FALSE TRUE nil YES NO NULL", +built_in:"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once" +},illegal:"/,end:/$/, +illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{ +className:"class",begin:"("+_.keyword.split(" ").join("|")+")\\b",end:/(\{|$)/, +excludeEnd:!0,keywords:_,contains:[e.UNDERSCORE_TITLE_MODE]},{ +begin:"\\."+e.UNDERSCORE_IDENT_RE,relevance:0}]}}})()); +hljs.registerLanguage("perl",(()=>{"use strict";function e(e){ +return e?"string"==typeof e?e:e.source:null}function n(...n){ +return n.map((n=>e(n))).join("")}function t(...n){ +return"("+n.map((n=>e(n))).join("|")+")"}return e=>{ +const r=/[dualxmsipngr]{0,12}/,s={$pattern:/[\w.]+/, +keyword:"abs accept alarm and atan2 bind binmode bless break caller chdir chmod chomp chop chown chr chroot close closedir connect continue cos crypt dbmclose dbmopen defined delete die do dump each else elsif endgrent endhostent endnetent endprotoent endpwent endservent eof eval exec exists exit exp fcntl fileno flock for foreach fork format formline getc getgrent getgrgid getgrnam gethostbyaddr gethostbyname gethostent getlogin getnetbyaddr getnetbyname getnetent getpeername getpgrp getpriority getprotobyname getprotobynumber getprotoent getpwent getpwnam getpwuid getservbyname getservbyport getservent getsockname getsockopt given glob gmtime goto grep gt hex if index int ioctl join keys kill last lc lcfirst length link listen local localtime log lstat lt ma map mkdir 
msgctl msgget msgrcv msgsnd my ne next no not oct open opendir or ord our pack package pipe pop pos print printf prototype push q|0 qq quotemeta qw qx rand read readdir readline readlink readpipe recv redo ref rename require reset return reverse rewinddir rindex rmdir say scalar seek seekdir select semctl semget semop send setgrent sethostent setnetent setpgrp setpriority setprotoent setpwent setservent setsockopt shift shmctl shmget shmread shmwrite shutdown sin sleep socket socketpair sort splice split sprintf sqrt srand stat state study sub substr symlink syscall sysopen sysread sysseek system syswrite tell telldir tie tied time times tr truncate uc ucfirst umask undef unless unlink unpack unshift untie until use utime values vec wait waitpid wantarray warn when while write x|0 xor y|0" +},i={className:"subst",begin:"[$@]\\{",end:"\\}",keywords:s},a={begin:/->\{/, +end:/\}/},o={variants:[{begin:/\$\d/},{ +begin:n(/[$%@](\^\w\b|#\w+(::\w+)*|\{\w+\}|\w+(::\w*)*)/,"(?![A-Za-z])(?![@$%])") +},{begin:/[$%@][^\s\w{]/,relevance:0}] +},c=[e.BACKSLASH_ESCAPE,i,o],g=[/!/,/\//,/\|/,/\?/,/'/,/"/,/#/],l=(e,t,s="\\1")=>{ +const i="\\1"===s?s:n(s,t) +;return n(n("(?:",e,")"),t,/(?:\\.|[^\\\/])*?/,i,/(?:\\.|[^\\\/])*?/,s,r) +},d=(e,t,s)=>n(n("(?:",e,")"),t,/(?:\\.|[^\\\/])*?/,s,r),p=[o,e.HASH_COMMENT_MODE,e.COMMENT(/^=\w/,/=cut/,{ +endsWithParent:!0}),a,{className:"string",contains:c,variants:[{ +begin:"q[qwxr]?\\s*\\(",end:"\\)",relevance:5},{begin:"q[qwxr]?\\s*\\[", +end:"\\]",relevance:5},{begin:"q[qwxr]?\\s*\\{",end:"\\}",relevance:5},{ +begin:"q[qwxr]?\\s*\\|",end:"\\|",relevance:5},{begin:"q[qwxr]?\\s*<",end:">", +relevance:5},{begin:"qw\\s+q",end:"q",relevance:5},{begin:"'",end:"'", +contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"'},{begin:"`",end:"`", +contains:[e.BACKSLASH_ESCAPE]},{begin:/\{\w+\}/,relevance:0},{ +begin:"-?\\w+\\s*=>",relevance:0}]},{className:"number", +begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b", +relevance:0},{ 
+begin:"(\\/\\/|"+e.RE_STARTERS_RE+"|\\b(split|return|print|reverse|grep)\\b)\\s*", +keywords:"split return print reverse grep",relevance:0, +contains:[e.HASH_COMMENT_MODE,{className:"regexp",variants:[{ +begin:l("s|tr|y",t(...g))},{begin:l("s|tr|y","\\(","\\)")},{ +begin:l("s|tr|y","\\[","\\]")},{begin:l("s|tr|y","\\{","\\}")}],relevance:2},{ +className:"regexp",variants:[{begin:/(m|qr)\/\//,relevance:0},{ +begin:d("(?:m|qr)?",/\//,/\//)},{begin:d("m|qr",t(...g),/\1/)},{ +begin:d("m|qr",/\(/,/\)/)},{begin:d("m|qr",/\[/,/\]/)},{ +begin:d("m|qr",/\{/,/\}/)}]}]},{className:"function",beginKeywords:"sub", +end:"(\\s*\\(.*?\\))?[;{]",excludeEnd:!0,relevance:5,contains:[e.TITLE_MODE]},{ +begin:"-\\w\\b",relevance:0},{begin:"^__DATA__$",end:"^__END__$", +subLanguage:"mojolicious",contains:[{begin:"^@@.*",end:"$",className:"comment"}] +}];return i.contains=p,a.contains=p,{name:"Perl",aliases:["pl","pm"],keywords:s, +contains:p}}})()); +hljs.registerLanguage("php",(()=>{"use strict";return e=>{const r={ +className:"variable", +begin:"\\$+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*(?![A-Za-z0-9])(?![$])"},t={ +className:"meta",variants:[{begin:/<\?php/,relevance:10},{begin:/<\?[=]?/},{ +begin:/\?>/}]},a={className:"subst",variants:[{begin:/\$\w+/},{begin:/\{\$/, +end:/\}/}]},n=e.inherit(e.APOS_STRING_MODE,{illegal:null +}),i=e.inherit(e.QUOTE_STRING_MODE,{illegal:null, +contains:e.QUOTE_STRING_MODE.contains.concat(a)}),o=e.END_SAME_AS_BEGIN({ +begin:/<<<[ \t]*(\w+)\n/,end:/[ \t]*(\w+)\b/, +contains:e.QUOTE_STRING_MODE.contains.concat(a)}),l={className:"string", +contains:[e.BACKSLASH_ESCAPE,t],variants:[e.inherit(n,{begin:"b'",end:"'" +}),e.inherit(i,{begin:'b"',end:'"'}),i,n,o]},c={ +variants:[e.BINARY_NUMBER_MODE,e.C_NUMBER_MODE]},s={ +keyword:"__CLASS__ __DIR__ __FILE__ __FUNCTION__ __LINE__ __METHOD__ __NAMESPACE__ __TRAIT__ die echo exit include include_once print require require_once array abstract and as binary bool boolean break callable case catch class clone const 
continue declare default do double else elseif empty enddeclare endfor endforeach endif endswitch endwhile eval extends final finally float for foreach from global goto if implements instanceof insteadof int integer interface isset iterable list match|0 new object or private protected public real return string switch throw trait try unset use var void while xor yield", +literal:"false null true", +built_in:"Error|0 AppendIterator ArgumentCountError ArithmeticError ArrayIterator ArrayObject AssertionError BadFunctionCallException BadMethodCallException CachingIterator CallbackFilterIterator CompileError Countable DirectoryIterator DivisionByZeroError DomainException EmptyIterator ErrorException Exception FilesystemIterator FilterIterator GlobIterator InfiniteIterator InvalidArgumentException IteratorIterator LengthException LimitIterator LogicException MultipleIterator NoRewindIterator OutOfBoundsException OutOfRangeException OuterIterator OverflowException ParentIterator ParseError RangeException RecursiveArrayIterator RecursiveCachingIterator RecursiveCallbackFilterIterator RecursiveDirectoryIterator RecursiveFilterIterator RecursiveIterator RecursiveIteratorIterator RecursiveRegexIterator RecursiveTreeIterator RegexIterator RuntimeException SeekableIterator SplDoublyLinkedList SplFileInfo SplFileObject SplFixedArray SplHeap SplMaxHeap SplMinHeap SplObjectStorage SplObserver SplObserver SplPriorityQueue SplQueue SplStack SplSubject SplSubject SplTempFileObject TypeError UnderflowException UnexpectedValueException ArrayAccess Closure Generator Iterator IteratorAggregate Serializable Throwable Traversable WeakReference Directory __PHP_Incomplete_Class parent php_user_filter self static stdClass" +};return{aliases:["php","php3","php4","php5","php6","php7","php8"], +case_insensitive:!0,keywords:s, +contains:[e.HASH_COMMENT_MODE,e.COMMENT("//","$",{contains:[t] +}),e.COMMENT("/\\*","\\*/",{contains:[{className:"doctag",begin:"@[A-Za-z]+"}] 
+}),e.COMMENT("__halt_compiler.+?;",!1,{endsWithParent:!0, +keywords:"__halt_compiler"}),t,{className:"keyword",begin:/\$this\b/},r,{ +begin:/(::|->)+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/},{className:"function", +relevance:0,beginKeywords:"fn function",end:/[;{]/,excludeEnd:!0, +illegal:"[$%\\[]",contains:[e.UNDERSCORE_TITLE_MODE,{begin:"=>"},{ +className:"params",begin:"\\(",end:"\\)",excludeBegin:!0,excludeEnd:!0, +keywords:s,contains:["self",r,e.C_BLOCK_COMMENT_MODE,l,c]}]},{className:"class", +beginKeywords:"class interface",relevance:0,end:/\{/,excludeEnd:!0, +illegal:/[:($"]/,contains:[{beginKeywords:"extends implements" +},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"namespace",relevance:0,end:";", +illegal:/[.']/,contains:[e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"use", +relevance:0,end:";",contains:[e.UNDERSCORE_TITLE_MODE]},l,c]}}})()); +hljs.registerLanguage("php-template",(()=>{"use strict";return n=>({ +name:"PHP template",subLanguage:"xml",contains:[{begin:/<\?(php|=)?/,end:/\?>/, +subLanguage:"php",contains:[{begin:"/\\*",end:"\\*/",skip:!0},{begin:'b"', +end:'"',skip:!0},{begin:"b'",end:"'",skip:!0},n.inherit(n.APOS_STRING_MODE,{ +illegal:null,className:null,contains:null,skip:!0 +}),n.inherit(n.QUOTE_STRING_MODE,{illegal:null,className:null,contains:null, +skip:!0})]}]})})()); +hljs.registerLanguage("plaintext",(()=>{"use strict";return t=>({ +name:"Plain text",aliases:["text","txt"],disableAutodetect:!0})})()); +hljs.registerLanguage("properties",(()=>{"use strict";return e=>{ +var n="[ \\t\\f]*",a=n+"[:=]"+n,t="("+a+"|[ \\t\\f]+)",r="([^\\\\\\W:= \\t\\f\\n]|\\\\.)+",s="([^\\\\:= \\t\\f\\n]|\\\\.)+",i={ +end:t,relevance:0,starts:{className:"string",end:/$/,relevance:0,contains:[{ +begin:"\\\\\\\\"},{begin:"\\\\\\n"}]}};return{name:".properties", +case_insensitive:!0,illegal:/\S/,contains:[e.COMMENT("^\\s*[!#]","$"),{ +returnBegin:!0,variants:[{begin:r+a,relevance:1},{begin:r+"[ \\t\\f]+", 
+relevance:0}],contains:[{className:"attr",begin:r,endsParent:!0,relevance:0}], +starts:i},{begin:s+t,returnBegin:!0,relevance:0,contains:[{className:"meta", +begin:s,endsParent:!0,relevance:0}],starts:i},{className:"attr",relevance:0, +begin:s+n+"$"}]}}})()); +hljs.registerLanguage("python",(()=>{"use strict";return e=>{const n={ +keyword:["and","as","assert","async","await","break","class","continue","def","del","elif","else","except","finally","for","","from","global","if","import","in","is","lambda","nonlocal|10","not","or","pass","raise","return","try","while","with","yield"], +built_in:["__import__","abs","all","any","ascii","bin","bool","breakpoint","bytearray","bytes","callable","chr","classmethod","compile","complex","delattr","dict","dir","divmod","enumerate","eval","exec","filter","float","format","frozenset","getattr","globals","hasattr","hash","help","hex","id","input","int","isinstance","issubclass","iter","len","list","locals","map","max","memoryview","min","next","object","oct","open","ord","pow","print","property","range","repr","reversed","round","set","setattr","slice","sorted","staticmethod","str","sum","super","tuple","type","vars","zip"], +literal:["__debug__","Ellipsis","False","None","NotImplemented","True"]},a={ +className:"meta",begin:/^(>>>|\.\.\.) 
/},s={className:"subst",begin:/\{/, +end:/\}/,keywords:n,illegal:/#/},i={begin:/\{\{/,relevance:0},r={ +className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{ +begin:/([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?'''/,end:/'''/, +contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{ +begin:/([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?"""/,end:/"""/, +contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{ +begin:/([fF][rR]|[rR][fF]|[fF])'''/,end:/'''/, +contains:[e.BACKSLASH_ESCAPE,a,i,s]},{begin:/([fF][rR]|[rR][fF]|[fF])"""/, +end:/"""/,contains:[e.BACKSLASH_ESCAPE,a,i,s]},{begin:/([uU]|[rR])'/,end:/'/, +relevance:10},{begin:/([uU]|[rR])"/,end:/"/,relevance:10},{ +begin:/([bB]|[bB][rR]|[rR][bB])'/,end:/'/},{begin:/([bB]|[bB][rR]|[rR][bB])"/, +end:/"/},{begin:/([fF][rR]|[rR][fF]|[fF])'/,end:/'/, +contains:[e.BACKSLASH_ESCAPE,i,s]},{begin:/([fF][rR]|[rR][fF]|[fF])"/,end:/"/, +contains:[e.BACKSLASH_ESCAPE,i,s]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE] +},t="[0-9](_?[0-9])*",l=`(\\b(${t}))?\\.(${t})|\\b(${t})\\.`,b={ +className:"number",relevance:0,variants:[{ +begin:`(\\b(${t})|(${l}))[eE][+-]?(${t})[jJ]?\\b`},{begin:`(${l})[jJ]?`},{ +begin:"\\b([1-9](_?[0-9])*|0+(_?0)*)[lLjJ]?\\b"},{ +begin:"\\b0[bB](_?[01])+[lL]?\\b"},{begin:"\\b0[oO](_?[0-7])+[lL]?\\b"},{ +begin:"\\b0[xX](_?[0-9a-fA-F])+[lL]?\\b"},{begin:`\\b(${t})[jJ]\\b`}]},o={ +className:"params",variants:[{begin:/\(\s*\)/,skip:!0,className:null},{ +begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:n, +contains:["self",a,b,r,e.HASH_COMMENT_MODE]}]};return s.contains=[r,b,a],{ +name:"Python",aliases:["py","gyp","ipython"],keywords:n, +illegal:/(<\/|->|\?)|=>/,contains:[a,b,{begin:/\bself\b/},{beginKeywords:"if", +relevance:0},r,e.HASH_COMMENT_MODE,{variants:[{className:"function", +beginKeywords:"def"},{className:"class",beginKeywords:"class"}],end:/:/, +illegal:/[${=;\n,]/,contains:[e.UNDERSCORE_TITLE_MODE,o,{begin:/->/, +endsWithParent:!0,keywords:"None"}]},{className:"meta",begin:/^[\t ]*@/, 
+end:/(?=#)|$/,contains:[b,o,r]},{begin:/\b(print|exec)\(/}]}}})()); +hljs.registerLanguage("python-repl",(()=>{"use strict";return s=>({ +aliases:["pycon"],contains:[{className:"meta",starts:{end:/ |$/,starts:{end:"$", +subLanguage:"python"}},variants:[{begin:/^>>>(?=[ ]|$)/},{ +begin:/^\.\.\.(?=[ ]|$)/}]}]})})()); +hljs.registerLanguage("r",(()=>{"use strict";function e(...e){return e.map((e=>{ +return(a=e)?"string"==typeof a?a:a.source:null;var a})).join("")}return a=>{ +const n=/(?:(?:[a-zA-Z]|\.[._a-zA-Z])[._a-zA-Z0-9]*)|\.(?!\d)/;return{name:"R", +illegal:/->/,keywords:{$pattern:n, +keyword:"function if in break next repeat else for while", +literal:"NULL NA TRUE FALSE Inf NaN NA_integer_|10 NA_real_|10 NA_character_|10 NA_complex_|10", +built_in:"LETTERS letters month.abb month.name pi T F abs acos acosh all any anyNA Arg as.call as.character as.complex as.double as.environment as.integer as.logical as.null.default as.numeric as.raw asin asinh atan atanh attr attributes baseenv browser c call ceiling class Conj cos cosh cospi cummax cummin cumprod cumsum digamma dim dimnames emptyenv exp expression floor forceAndCall gamma gc.time globalenv Im interactive invisible is.array is.atomic is.call is.character is.complex is.double is.environment is.expression is.finite is.function is.infinite is.integer is.language is.list is.logical is.matrix is.na is.name is.nan is.null is.numeric is.object is.pairlist is.raw is.recursive is.single is.symbol lazyLoadDBfetch length lgamma list log max min missing Mod names nargs nzchar oldClass on.exit pos.to.env proc.time prod quote range Re rep retracemem return round seq_along seq_len seq.int sign signif sin sinh sinpi sqrt standardGeneric substitute sum switch tan tanh tanpi tracemem trigamma trunc unclass untracemem UseMethod xtfrm" +},compilerExtensions:[(a,n)=>{if(!a.beforeMatch)return +;if(a.starts)throw Error("beforeMatch cannot be used with starts") +;const i=Object.assign({},a);Object.keys(a).forEach((e=>{delete a[e] 
+})),a.begin=e(i.beforeMatch,e("(?=",i.begin,")")),a.starts={relevance:0, +contains:[Object.assign(i,{endsParent:!0})]},a.relevance=0,delete i.beforeMatch +}],contains:[a.COMMENT(/#'/,/$/,{contains:[{className:"doctag", +begin:"@examples",starts:{contains:[{begin:/\n/},{begin:/#'\s*(?=@[a-zA-Z]+)/, +endsParent:!0},{begin:/#'/,end:/$/,excludeBegin:!0}]}},{className:"doctag", +begin:"@param",end:/$/,contains:[{className:"variable",variants:[{begin:n},{ +begin:/`(?:\\.|[^`\\])+`/}],endsParent:!0}]},{className:"doctag", +begin:/@[a-zA-Z]+/},{className:"meta-keyword",begin:/\\[a-zA-Z]+/}] +}),a.HASH_COMMENT_MODE,{className:"string",contains:[a.BACKSLASH_ESCAPE], +variants:[a.END_SAME_AS_BEGIN({begin:/[rR]"(-*)\(/,end:/\)(-*)"/ +}),a.END_SAME_AS_BEGIN({begin:/[rR]"(-*)\{/,end:/\}(-*)"/ +}),a.END_SAME_AS_BEGIN({begin:/[rR]"(-*)\[/,end:/\](-*)"/ +}),a.END_SAME_AS_BEGIN({begin:/[rR]'(-*)\(/,end:/\)(-*)'/ +}),a.END_SAME_AS_BEGIN({begin:/[rR]'(-*)\{/,end:/\}(-*)'/ +}),a.END_SAME_AS_BEGIN({begin:/[rR]'(-*)\[/,end:/\](-*)'/}),{begin:'"',end:'"', +relevance:0},{begin:"'",end:"'",relevance:0}]},{className:"number",relevance:0, +beforeMatch:/([^a-zA-Z0-9._])/,variants:[{ +match:/0[xX][0-9a-fA-F]+\.[0-9a-fA-F]*[pP][+-]?\d+i?/},{ +match:/0[xX][0-9a-fA-F]+([pP][+-]?\d+)?[Li]?/},{ +match:/(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?[Li]?/}]},{begin:"%",end:"%"},{ +begin:e(/[a-zA-Z][a-zA-Z_0-9]*/,"\\s+<-\\s+")},{begin:"`",end:"`",contains:[{ +begin:/\\./}]}]}}})()); +hljs.registerLanguage("ruby",(()=>{"use strict";function e(...e){ +return e.map((e=>{return(n=e)?"string"==typeof n?n:n.source:null;var n +})).join("")}return n=>{ +const a="([a-zA-Z_]\\w*[!?=]?|[-+~]@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?)",i={ +keyword:"and then defined module in return redo if BEGIN retry end for self when next until do begin unless END rescue else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor __FILE__", +built_in:"proc 
lambda",literal:"true false nil"},s={className:"doctag", +begin:"@[A-Za-z]+"},r={begin:"#<",end:">"},b=[n.COMMENT("#","$",{contains:[s] +}),n.COMMENT("^=begin","^=end",{contains:[s],relevance:10 +}),n.COMMENT("^__END__","\\n$")],c={className:"subst",begin:/#\{/,end:/\}/, +keywords:i},t={className:"string",contains:[n.BACKSLASH_ESCAPE,c],variants:[{ +begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/`/,end:/`/},{begin:/%[qQwWx]?\(/, +end:/\)/},{begin:/%[qQwWx]?\[/,end:/\]/},{begin:/%[qQwWx]?\{/,end:/\}/},{ +begin:/%[qQwWx]?/},{begin:/%[qQwWx]?\//,end:/\//},{begin:/%[qQwWx]?%/, +end:/%/},{begin:/%[qQwWx]?-/,end:/-/},{begin:/%[qQwWx]?\|/,end:/\|/},{ +begin:/\B\?(\\\d{1,3})/},{begin:/\B\?(\\x[A-Fa-f0-9]{1,2})/},{ +begin:/\B\?(\\u\{?[A-Fa-f0-9]{1,6}\}?)/},{ +begin:/\B\?(\\M-\\C-|\\M-\\c|\\c\\M-|\\M-|\\C-\\M-)[\x20-\x7e]/},{ +begin:/\B\?\\(c|C-)[\x20-\x7e]/},{begin:/\B\?\\?\S/},{ +begin:/<<[-~]?'?(\w+)\n(?:[^\n]*\n)*?\s*\1\b/,returnBegin:!0,contains:[{ +begin:/<<[-~]?'?/},n.END_SAME_AS_BEGIN({begin:/(\w+)/,end:/(\w+)/, +contains:[n.BACKSLASH_ESCAPE,c]})]}]},g="[0-9](_?[0-9])*",d={className:"number", +relevance:0,variants:[{ +begin:`\\b([1-9](_?[0-9])*|0)(\\.(${g}))?([eE][+-]?(${g})|r)?i?\\b`},{ +begin:"\\b0[dD][0-9](_?[0-9])*r?i?\\b"},{begin:"\\b0[bB][0-1](_?[0-1])*r?i?\\b" +},{begin:"\\b0[oO][0-7](_?[0-7])*r?i?\\b"},{ +begin:"\\b0[xX][0-9a-fA-F](_?[0-9a-fA-F])*r?i?\\b"},{ +begin:"\\b0(_?[0-7])+r?i?\\b"}]},l={className:"params",begin:"\\(",end:"\\)", +endsParent:!0,keywords:i},o=[t,{className:"class",beginKeywords:"class module", +end:"$|;",illegal:/=/,contains:[n.inherit(n.TITLE_MODE,{ +begin:"[A-Za-z_]\\w*(::\\w+)*(\\?|!)?"}),{begin:"<\\s*",contains:[{ +begin:"("+n.IDENT_RE+"::)?"+n.IDENT_RE,relevance:0}]}].concat(b)},{ +className:"function",begin:e(/def\s*/,(_=a+"\\s*(\\(|;|$)",e("(?=",_,")"))), +relevance:0,keywords:"def",end:"$|;",contains:[n.inherit(n.TITLE_MODE,{begin:a +}),l].concat(b)},{begin:n.IDENT_RE+"::"},{className:"symbol", 
+begin:n.UNDERSCORE_IDENT_RE+"(!|\\?)?:",relevance:0},{className:"symbol", +begin:":(?!\\s)",contains:[t,{begin:a}],relevance:0},d,{className:"variable", +begin:"(\\$\\W)|((\\$|@@?)(\\w+))(?=[^@$?])(?![A-Za-z])(?![@$?'])"},{ +className:"params",begin:/\|/,end:/\|/,relevance:0,keywords:i},{ +begin:"("+n.RE_STARTERS_RE+"|unless)\\s*",keywords:"unless",contains:[{ +className:"regexp",contains:[n.BACKSLASH_ESCAPE,c],illegal:/\n/,variants:[{ +begin:"/",end:"/[a-z]*"},{begin:/%r\{/,end:/\}[a-z]*/},{begin:"%r\\(", +end:"\\)[a-z]*"},{begin:"%r!",end:"![a-z]*"},{begin:"%r\\[",end:"\\][a-z]*"}] +}].concat(r,b),relevance:0}].concat(r,b);var _;c.contains=o,l.contains=o +;const E=[{begin:/^\s*=>/,starts:{end:"$",contains:o}},{className:"meta", +begin:"^([>?]>|[\\w#]+\\(\\w+\\):\\d+:\\d+>|(\\w+-)?\\d+\\.\\d+\\.\\d+(p\\d+)?[^\\d][^>]+>)(?=[ ])", +starts:{end:"$",contains:o}}];return b.unshift(r),{name:"Ruby", +aliases:["rb","gemspec","podspec","thor","irb"],keywords:i,illegal:/\/\*/, +contains:[n.SHEBANG({binary:"ruby"})].concat(E).concat(b).concat(o)}}})()); +hljs.registerLanguage("rust",(()=>{"use strict";return e=>{ +const n="([ui](8|16|32|64|128|size)|f(32|64))?",t="drop i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize f32 f64 str char bool Box Option Result String Vec Copy Send Sized Sync Drop Fn FnMut FnOnce ToOwned Clone Debug PartialEq PartialOrd Eq Ord AsRef AsMut Into From Default Iterator Extend IntoIterator DoubleEndedIterator ExactSizeIterator SliceConcatExt ToString assert! assert_eq! bitflags! bytes! cfg! col! concat! concat_idents! debug_assert! debug_assert_eq! env! panic! file! format! format_args! include_bin! include_str! line! local_data_key! module_path! option_env! print! println! select! stringify! try! unimplemented! unreachable! vec! write! writeln! macro_rules! assert_ne! debug_assert_ne!" 
+;return{name:"Rust",aliases:["rs"],keywords:{$pattern:e.IDENT_RE+"!?", +keyword:"abstract as async await become box break const continue crate do dyn else enum extern false final fn for if impl in let loop macro match mod move mut override priv pub ref return self Self static struct super trait true try type typeof unsafe unsized use virtual where while yield", +literal:"true false Some None Ok Err",built_in:t},illegal:""}]}}})()); +hljs.registerLanguage("scss",(()=>{"use strict" +;const e=["a","abbr","address","article","aside","audio","b","blockquote","body","button","canvas","caption","cite","code","dd","del","details","dfn","div","dl","dt","em","fieldset","figcaption","figure","footer","form","h1","h2","h3","h4","h5","h6","header","hgroup","html","i","iframe","img","input","ins","kbd","label","legend","li","main","mark","menu","nav","object","ol","p","q","quote","samp","section","span","strong","summary","sup","table","tbody","td","textarea","tfoot","th","thead","time","tr","ul","var","video"],t=["any-hover","any-pointer","aspect-ratio","color","color-gamut","color-index","device-aspect-ratio","device-height","device-width","display-mode","forced-colors","grid","height","hover","inverted-colors","monochrome","orientation","overflow-block","overflow-inline","pointer","prefers-color-scheme","prefers-contrast","prefers-reduced-motion","prefers-reduced-transparency","resolution","scan","scripting","update","width","min-width","max-width","min-height","max-height"],i=["active","any-link","blank","checked","current","default","defined","dir","disabled","drop","empty","enabled","first","first-child","first-of-type","fullscreen","future","focus","focus-visible","focus-within","has","host","host-context","hover","indeterminate","in-range","invalid","is","lang","last-child","last-of-type","left","link","local-link","not","nth-child","nth-col","nth-last-child","nth-last-col","nth-last-of-type","nth-of-type","only-child","only-of-type","optional","out-of-range","past","pla
ceholder-shown","read-only","read-write","required","right","root","scope","target","target-within","user-invalid","valid","visited","where"],r=["after","backdrop","before","cue","cue-region","first-letter","first-line","grammar-error","marker","part","placeholder","selection","slotted","spelling-error"],o=["align-content","align-items","align-self","animation","animation-delay","animation-direction","animation-duration","animation-fill-mode","animation-iteration-count","animation-name","animation-play-state","animation-timing-function","auto","backface-visibility","background","background-attachment","background-clip","background-color","background-image","background-origin","background-position","background-repeat","background-size","border","border-bottom","border-bottom-color","border-bottom-left-radius","border-bottom-right-radius","border-bottom-style","border-bottom-width","border-collapse","border-color","border-image","border-image-outset","border-image-repeat","border-image-slice","border-image-source","border-image-width","border-left","border-left-color","border-left-style","border-left-width","border-radius","border-right","border-right-color","border-right-style","border-right-width","border-spacing","border-style","border-top","border-top-color","border-top-left-radius","border-top-right-radius","border-top-style","border-top-width","border-width","bottom","box-decoration-break","box-shadow","box-sizing","break-after","break-before","break-inside","caption-side","clear","clip","clip-path","color","column-count","column-fill","column-gap","column-rule","column-rule-color","column-rule-style","column-rule-width","column-span","column-width","columns","content","counter-increment","counter-reset","cursor","direction","display","empty-cells","filter","flex","flex-basis","flex-direction","flex-flow","flex-grow","flex-shrink","flex-wrap","float","font","font-display","font-family","font-feature-settings","font-kerning","font-language-override","font-size","
font-size-adjust","font-stretch","font-style","font-variant","font-variant-ligatures","font-variation-settings","font-weight","height","hyphens","icon","image-orientation","image-rendering","image-resolution","ime-mode","inherit","initial","justify-content","left","letter-spacing","line-height","list-style","list-style-image","list-style-position","list-style-type","margin","margin-bottom","margin-left","margin-right","margin-top","marks","mask","max-height","max-width","min-height","min-width","nav-down","nav-index","nav-left","nav-right","nav-up","none","normal","object-fit","object-position","opacity","order","orphans","outline","outline-color","outline-offset","outline-style","outline-width","overflow","overflow-wrap","overflow-x","overflow-y","padding","padding-bottom","padding-left","padding-right","padding-top","page-break-after","page-break-before","page-break-inside","perspective","perspective-origin","pointer-events","position","quotes","resize","right","src","tab-size","table-layout","text-align","text-align-last","text-decoration","text-decoration-color","text-decoration-line","text-decoration-style","text-indent","text-overflow","text-rendering","text-shadow","text-transform","text-underline-position","top","transform","transform-origin","transform-style","transition","transition-delay","transition-duration","transition-property","transition-timing-function","unicode-bidi","vertical-align","visibility","white-space","widows","width","word-break","word-spacing","word-wrap","z-index"].reverse() +;return a=>{const n=(e=>({IMPORTANT:{className:"meta",begin:"!important"}, +HEXCOLOR:{className:"number",begin:"#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})"}, +ATTRIBUTE_SELECTOR_MODE:{className:"selector-attr",begin:/\[/,end:/\]/, +illegal:"$",contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]} +}))(a),l=r,s=i,d="@[a-z-]+",c={className:"variable", +begin:"(\\$[a-zA-Z-][a-zA-Z0-9_-]*)\\b"};return{name:"SCSS",case_insensitive:!0, 
+illegal:"[=/|']",contains:[a.C_LINE_COMMENT_MODE,a.C_BLOCK_COMMENT_MODE,{ +className:"selector-id",begin:"#[A-Za-z0-9_-]+",relevance:0},{ +className:"selector-class",begin:"\\.[A-Za-z0-9_-]+",relevance:0 +},n.ATTRIBUTE_SELECTOR_MODE,{className:"selector-tag", +begin:"\\b("+e.join("|")+")\\b",relevance:0},{className:"selector-pseudo", +begin:":("+s.join("|")+")"},{className:"selector-pseudo", +begin:"::("+l.join("|")+")"},c,{begin:/\(/,end:/\)/,contains:[a.CSS_NUMBER_MODE] +},{className:"attribute",begin:"\\b("+o.join("|")+")\\b"},{ +begin:"\\b(whitespace|wait|w-resize|visible|vertical-text|vertical-ideographic|uppercase|upper-roman|upper-alpha|underline|transparent|top|thin|thick|text|text-top|text-bottom|tb-rl|table-header-group|table-footer-group|sw-resize|super|strict|static|square|solid|small-caps|separate|se-resize|scroll|s-resize|rtl|row-resize|ridge|right|repeat|repeat-y|repeat-x|relative|progress|pointer|overline|outside|outset|oblique|nowrap|not-allowed|normal|none|nw-resize|no-repeat|no-drop|newspaper|ne-resize|n-resize|move|middle|medium|ltr|lr-tb|lowercase|lower-roman|lower-alpha|loose|list-item|line|line-through|line-edge|lighter|left|keep-all|justify|italic|inter-word|inter-ideograph|inside|inset|inline|inline-block|inherit|inactive|ideograph-space|ideograph-parenthesis|ideograph-numeric|ideograph-alpha|horizontal|hidden|help|hand|groove|fixed|ellipsis|e-resize|double|dotted|distribute|distribute-space|distribute-letter|distribute-all-lines|disc|disabled|default|decimal|dashed|crosshair|collapse|col-resize|circle|char|center|capitalize|break-word|break-all|bottom|both|bolder|bold|block|bidi-override|below|baseline|auto|always|all-scroll|absolute|table|table-cell)\\b" +},{begin:":",end:";", +contains:[c,n.HEXCOLOR,a.CSS_NUMBER_MODE,a.QUOTE_STRING_MODE,a.APOS_STRING_MODE,n.IMPORTANT] +},{begin:"@(page|font-face)",lexemes:d,keywords:"@page @font-face"},{begin:"@", +end:"[{;]",returnBegin:!0,keywords:{$pattern:/[a-z-]+/, +keyword:"and or not 
only",attribute:t.join(" ")},contains:[{begin:d, +className:"keyword"},{begin:/[a-z-]+(?=:)/,className:"attribute" +},c,a.QUOTE_STRING_MODE,a.APOS_STRING_MODE,n.HEXCOLOR,a.CSS_NUMBER_MODE]}]}} +})()); +hljs.registerLanguage("shell",(()=>{"use strict";return s=>({ +name:"Shell Session",aliases:["console"],contains:[{className:"meta", +begin:/^\s{0,3}[/~\w\d[\]()@-]*[>%$#]/,starts:{end:/[^\\](?=\s*$)/, +subLanguage:"bash"}}]})})()); +hljs.registerLanguage("sql",(()=>{"use strict";function e(e){ +return e?"string"==typeof e?e:e.source:null}function r(...r){ +return r.map((r=>e(r))).join("")}function t(...r){ +return"("+r.map((r=>e(r))).join("|")+")"}return e=>{ +const n=e.COMMENT("--","$"),a=["true","false","unknown"],i=["bigint","binary","blob","boolean","char","character","clob","date","dec","decfloat","decimal","float","int","integer","interval","nchar","nclob","national","numeric","real","row","smallint","time","timestamp","varchar","varying","varbinary"],s=["abs","acos","array_agg","asin","atan","avg","cast","ceil","ceiling","coalesce","corr","cos","cosh","count","covar_pop","covar_samp","cume_dist","dense_rank","deref","element","exp","extract","first_value","floor","json_array","json_arrayagg","json_exists","json_object","json_objectagg","json_query","json_table","json_table_primitive","json_value","lag","last_value","lead","listagg","ln","log","log10","lower","max","min","mod","nth_value","ntile","nullif","percent_rank","percentile_cont","percentile_disc","position","position_regex","power","rank","regr_avgx","regr_avgy","regr_count","regr_intercept","regr_r2","regr_slope","regr_sxx","regr_sxy","regr_syy","row_number","sin","sinh","sqrt","stddev_pop","stddev_samp","substring","substring_regex","sum","tan","tanh","translate","translate_regex","treat","trim","trim_array","unnest","upper","value_of","var_pop","var_samp","width_bucket"],o=["create table","insert into","primary key","foreign key","not null","alter table","add constraint","grouping sets","on 
overflow","character set","respect nulls","ignore nulls","nulls first","nulls last","depth first","breadth first"],c=s,l=["abs","acos","all","allocate","alter","and","any","are","array","array_agg","array_max_cardinality","as","asensitive","asin","asymmetric","at","atan","atomic","authorization","avg","begin","begin_frame","begin_partition","between","bigint","binary","blob","boolean","both","by","call","called","cardinality","cascaded","case","cast","ceil","ceiling","char","char_length","character","character_length","check","classifier","clob","close","coalesce","collate","collect","column","commit","condition","connect","constraint","contains","convert","copy","corr","corresponding","cos","cosh","count","covar_pop","covar_samp","create","cross","cube","cume_dist","current","current_catalog","current_date","current_default_transform_group","current_path","current_role","current_row","current_schema","current_time","current_timestamp","current_path","current_role","current_transform_group_for_type","current_user","cursor","cycle","date","day","deallocate","dec","decimal","decfloat","declare","default","define","delete","dense_rank","deref","describe","deterministic","disconnect","distinct","double","drop","dynamic","each","element","else","empty","end","end_frame","end_partition","end-exec","equals","escape","every","except","exec","execute","exists","exp","external","extract","false","fetch","filter","first_value","float","floor","for","foreign","frame_row","free","from","full","function","fusion","get","global","grant","group","grouping","groups","having","hold","hour","identity","in","indicator","initial","inner","inout","insensitive","insert","int","integer","intersect","intersection","interval","into","is","join","json_array","json_arrayagg","json_exists","json_object","json_objectagg","json_query","json_table","json_table_primitive","json_value","lag","language","large","last_value","lateral","lead","leading","left","like","like_regex","listagg","ln","local",
"localtime","localtimestamp","log","log10","lower","match","match_number","match_recognize","matches","max","member","merge","method","min","minute","mod","modifies","module","month","multiset","national","natural","nchar","nclob","new","no","none","normalize","not","nth_value","ntile","null","nullif","numeric","octet_length","occurrences_regex","of","offset","old","omit","on","one","only","open","or","order","out","outer","over","overlaps","overlay","parameter","partition","pattern","per","percent","percent_rank","percentile_cont","percentile_disc","period","portion","position","position_regex","power","precedes","precision","prepare","primary","procedure","ptf","range","rank","reads","real","recursive","ref","references","referencing","regr_avgx","regr_avgy","regr_count","regr_intercept","regr_r2","regr_slope","regr_sxx","regr_sxy","regr_syy","release","result","return","returns","revoke","right","rollback","rollup","row","row_number","rows","running","savepoint","scope","scroll","search","second","seek","select","sensitive","session_user","set","show","similar","sin","sinh","skip","smallint","some","specific","specifictype","sql","sqlexception","sqlstate","sqlwarning","sqrt","start","static","stddev_pop","stddev_samp","submultiset","subset","substring","substring_regex","succeeds","sum","symmetric","system","system_time","system_user","table","tablesample","tan","tanh","then","time","timestamp","timezone_hour","timezone_minute","to","trailing","translate","translate_regex","translation","treat","trigger","trim","trim_array","true","truncate","uescape","union","unique","unknown","unnest","update ","upper","user","using","value","values","value_of","var_pop","var_samp","varbinary","varchar","varying","versioning","when","whenever","where","width_bucket","window","with","within","without","year","add","asc","collation","desc","final","first","last","view"].filter((e=>!s.includes(e))),u={ +begin:r(/\b/,t(...c),/\s*\(/),keywords:{built_in:c}};return{name:"SQL", 
+case_insensitive:!0,illegal:/[{}]|<\//,keywords:{$pattern:/\b[\w\.]+/, +keyword:((e,{exceptions:r,when:t}={})=>{const n=t +;return r=r||[],e.map((e=>e.match(/\|\d+$/)||r.includes(e)?e:n(e)?e+"|0":e)) +})(l,{when:e=>e.length<3}),literal:a,type:i, +built_in:["current_catalog","current_date","current_default_transform_group","current_path","current_role","current_schema","current_transform_group_for_type","current_user","session_user","system_time","system_user","current_time","localtime","current_timestamp","localtimestamp"] +},contains:[{begin:t(...o),keywords:{$pattern:/[\w\.]+/,keyword:l.concat(o), +literal:a,type:i}},{className:"type", +begin:t("double precision","large object","with timezone","without timezone") +},u,{className:"variable",begin:/@[a-z0-9]+/},{className:"string",variants:[{ +begin:/'/,end:/'/,contains:[{begin:/''/}]}]},{begin:/"/,end:/"/,contains:[{ +begin:/""/}]},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,n,{className:"operator", +begin:/[-+*/=%^~]|&&?|\|\|?|!=?|<(?:=>?|<|>)?|>[>=]?/,relevance:0}]}}})()); +hljs.registerLanguage("swift",(()=>{"use strict";function e(e){ +return e?"string"==typeof e?e:e.source:null}function n(e){return a("(?=",e,")")} +function a(...n){return n.map((n=>e(n))).join("")}function t(...n){ +return"("+n.map((n=>e(n))).join("|")+")"} +const 
i=e=>a(/\b/,e,/\w$/.test(e)?/\b/:/\B/),s=["Protocol","Type"].map(i),u=["init","self"].map(i),c=["Any","Self"],r=["associatedtype",/as\?/,/as!/,"as","break","case","catch","class","continue","convenience","default","defer","deinit","didSet","do","dynamic","else","enum","extension","fallthrough",/fileprivate\(set\)/,"fileprivate","final","for","func","get","guard","if","import","indirect","infix",/init\?/,/init!/,"inout",/internal\(set\)/,"internal","in","is","lazy","let","mutating","nonmutating",/open\(set\)/,"open","operator","optional","override","postfix","precedencegroup","prefix",/private\(set\)/,"private","protocol",/public\(set\)/,"public","repeat","required","rethrows","return","set","some","static","struct","subscript","super","switch","throws","throw",/try\?/,/try!/,"try","typealias",/unowned\(safe\)/,/unowned\(unsafe\)/,"unowned","var","weak","where","while","willSet"],o=["false","nil","true"],l=["assignment","associativity","higherThan","left","lowerThan","none","right"],m=["#colorLiteral","#column","#dsohandle","#else","#elseif","#endif","#error","#file","#fileID","#fileLiteral","#filePath","#function","#if","#imageLiteral","#keyPath","#line","#selector","#sourceLocation","#warn_unqualified_access","#warning"],d=["abs","all","any","assert","assertionFailure","debugPrint","dump","fatalError","getVaList","isKnownUniquelyReferenced","max","min","numericCast","pointwiseMax","pointwiseMin","precondition","preconditionFailure","print","readLine","repeatElement","sequence","stride","swap","swift_unboxFromSwiftValueWithType","transcode","type","unsafeBitCast","unsafeDowncast","withExtendedLifetime","withUnsafeMutablePointer","withUnsafePointer","withVaList","withoutActuallyEscaping","zip"],p=t(/[/=\-+!*%<>&|^~?]/,/[\u00A1-\u00A7]/,/[\u00A9\u00AB]/,/[\u00AC\u00AE]/,/[\u00B0\u00B1]/,/[\u00B6\u00BB\u00BF\u00D7\u00F7]/,/[\u2016-\u2017]/,/[\u2020-\u2027]/,/[\u2030-\u203E]/,/[\u2041-\u2053]/,/[\u2055-\u205E]/,/[\u2190-\u23FF]/,/[\u2500-\u2775]/,/[\u2794-\u2BFF]/,/[\u2
E00-\u2E7F]/,/[\u3001-\u3003]/,/[\u3008-\u3020]/,/[\u3030]/),F=t(p,/[\u0300-\u036F]/,/[\u1DC0-\u1DFF]/,/[\u20D0-\u20FF]/,/[\uFE00-\uFE0F]/,/[\uFE20-\uFE2F]/),b=a(p,F,"*"),h=t(/[a-zA-Z_]/,/[\u00A8\u00AA\u00AD\u00AF\u00B2-\u00B5\u00B7-\u00BA]/,/[\u00BC-\u00BE\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF]/,/[\u0100-\u02FF\u0370-\u167F\u1681-\u180D\u180F-\u1DBF]/,/[\u1E00-\u1FFF]/,/[\u200B-\u200D\u202A-\u202E\u203F-\u2040\u2054\u2060-\u206F]/,/[\u2070-\u20CF\u2100-\u218F\u2460-\u24FF\u2776-\u2793]/,/[\u2C00-\u2DFF\u2E80-\u2FFF]/,/[\u3004-\u3007\u3021-\u302F\u3031-\u303F\u3040-\uD7FF]/,/[\uF900-\uFD3D\uFD40-\uFDCF\uFDF0-\uFE1F\uFE30-\uFE44]/,/[\uFE47-\uFEFE\uFF00-\uFFFD]/),f=t(h,/\d/,/[\u0300-\u036F\u1DC0-\u1DFF\u20D0-\u20FF\uFE20-\uFE2F]/),w=a(h,f,"*"),y=a(/[A-Z]/,f,"*"),g=["autoclosure",a(/convention\(/,t("swift","block","c"),/\)/),"discardableResult","dynamicCallable","dynamicMemberLookup","escaping","frozen","GKInspectable","IBAction","IBDesignable","IBInspectable","IBOutlet","IBSegueAction","inlinable","main","nonobjc","NSApplicationMain","NSCopying","NSManaged",a(/objc\(/,w,/\)/),"objc","objcMembers","propertyWrapper","requires_stored_property_inits","testable","UIApplicationMain","unknown","usableFromInline"],E=["iOS","iOSApplicationExtension","macOS","macOSApplicationExtension","macCatalyst","macCatalystApplicationExtension","watchOS","watchOSApplicationExtension","tvOS","tvOSApplicationExtension","swift"] +;return e=>{const p={match:/\s+/,relevance:0},h=e.COMMENT("/\\*","\\*/",{ +contains:["self"]}),v=[e.C_LINE_COMMENT_MODE,h],N={className:"keyword", +begin:a(/\./,n(t(...s,...u))),end:t(...s,...u),excludeBegin:!0},A={ +match:a(/\./,t(...r)),relevance:0 +},C=r.filter((e=>"string"==typeof e)).concat(["_|0"]),_={variants:[{ +className:"keyword", +match:t(...r.filter((e=>"string"!=typeof e)).concat(c).map(i),...u)}]},D={ +$pattern:t(/\b\w+/,/#\w+/),keyword:C.concat(m),literal:o},B=[N,A,_],k=[{ +match:a(/\./,t(...d)),relevance:0},{className:"built_in", 
+match:a(/\b/,t(...d),/(?=\()/)}],M={match:/->/,relevance:0},S=[M,{ +className:"operator",relevance:0,variants:[{match:b},{match:`\\.(\\.|${F})+`}] +}],x="([0-9a-fA-F]_*)+",I={className:"number",relevance:0,variants:[{ +match:"\\b(([0-9]_*)+)(\\.(([0-9]_*)+))?([eE][+-]?(([0-9]_*)+))?\\b"},{ +match:`\\b0x(${x})(\\.(${x}))?([pP][+-]?(([0-9]_*)+))?\\b`},{ +match:/\b0o([0-7]_*)+\b/},{match:/\b0b([01]_*)+\b/}]},O=(e="")=>({ +className:"subst",variants:[{match:a(/\\/,e,/[0\\tnr"']/)},{ +match:a(/\\/,e,/u\{[0-9a-fA-F]{1,8}\}/)}]}),T=(e="")=>({className:"subst", +match:a(/\\/,e,/[\t ]*(?:[\r\n]|\r\n)/)}),L=(e="")=>({className:"subst", +label:"interpol",begin:a(/\\/,e,/\(/),end:/\)/}),P=(e="")=>({begin:a(e,/"""/), +end:a(/"""/,e),contains:[O(e),T(e),L(e)]}),$=(e="")=>({begin:a(e,/"/), +end:a(/"/,e),contains:[O(e),L(e)]}),K={className:"string", +variants:[P(),P("#"),P("##"),P("###"),$(),$("#"),$("##"),$("###")]},j={ +match:a(/`/,w,/`/)},z=[j,{className:"variable",match:/\$\d+/},{ +className:"variable",match:`\\$${f}+`}],q=[{match:/(@|#)available/, +className:"keyword",starts:{contains:[{begin:/\(/,end:/\)/,keywords:E, +contains:[...S,I,K]}]}},{className:"keyword",match:a(/@/,t(...g))},{ +className:"meta",match:a(/@/,w)}],U={match:n(/\b[A-Z]/),relevance:0,contains:[{ +className:"type", +match:a(/(AV|CA|CF|CG|CI|CL|CM|CN|CT|MK|MP|MTK|MTL|NS|SCN|SK|UI|WK|XC)/,f,"+") +},{className:"type",match:y,relevance:0},{match:/[?!]+/,relevance:0},{ +match:/\.\.\./,relevance:0},{match:a(/\s+&\s+/,n(y)),relevance:0}]},Z={ +begin://,keywords:D,contains:[...v,...B,...q,M,U]};U.contains.push(Z) +;const G={begin:/\(/,end:/\)/,relevance:0,keywords:D,contains:["self",{ +match:a(w,/\s*:/),keywords:"_|0",relevance:0 +},...v,...B,...k,...S,I,K,...z,...q,U]},H={beginKeywords:"func",contains:[{ +className:"title",match:t(j.match,w,b),endsParent:!0,relevance:0},p]},R={ +begin://,contains:[...v,U]},V={begin:/\(/,end:/\)/,keywords:D, 
+contains:[{begin:t(n(a(w,/\s*:/)),n(a(w,/\s+/,w,/\s*:/))),end:/:/,relevance:0, +contains:[{className:"keyword",match:/\b_\b/},{className:"params",match:w}] +},...v,...B,...S,I,K,...q,U,G],endsParent:!0,illegal:/["']/},W={ +className:"function",match:n(/\bfunc\b/),contains:[H,R,V,p],illegal:[/\[/,/%/] +},X={className:"function",match:/\b(subscript|init[?!]?)\s*(?=[<(])/,keywords:{ +keyword:"subscript init init? init!",$pattern:/\w+[?!]?/},contains:[R,V,p], +illegal:/\[|%/},J={beginKeywords:"operator",end:e.MATCH_NOTHING_RE,contains:[{ +className:"title",match:b,endsParent:!0,relevance:0}]},Q={ +beginKeywords:"precedencegroup",end:e.MATCH_NOTHING_RE,contains:[{ +className:"title",match:y,relevance:0},{begin:/{/,end:/}/,relevance:0, +endsParent:!0,keywords:[...l,...o],contains:[U]}]};for(const e of K.variants){ +const n=e.contains.find((e=>"interpol"===e.label));n.keywords=D +;const a=[...B,...k,...S,I,K,...z];n.contains=[...a,{begin:/\(/,end:/\)/, +contains:["self",...a]}]}return{name:"Swift",keywords:D,contains:[...v,W,X,{ +className:"class",beginKeywords:"struct protocol class extension enum", +end:"\\{",excludeEnd:!0,keywords:D,contains:[e.inherit(e.TITLE_MODE,{ +begin:/[A-Za-z$_][\u00C0-\u02B80-9A-Za-z$_]*/}),...B]},J,Q,{ +beginKeywords:"import",end:/$/,contains:[...v],relevance:0 +},...B,...k,...S,I,K,...z,...q,U,G]}}})()); +hljs.registerLanguage("typescript",(()=>{"use strict" +;const 
e="[A-Za-z$_][0-9A-Za-z$_]*",n=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],a=["true","false","null","undefined","NaN","Infinity"],s=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]) +;function t(e){return r("(?=",e,")")}function r(...e){return e.map((e=>{ +return(n=e)?"string"==typeof n?n:n.source:null;var n})).join("")}return i=>{ +const c={$pattern:e, +keyword:n.concat(["type","namespace","typedef","interface","public","private","protected","implements","declare","abstract","readonly"]), +literal:a, +built_in:s.concat(["any","void","number","boolean","string","object","never","enum"]) +},o={className:"meta",begin:"@[A-Za-z$_][0-9A-Za-z$_]*"},l=(e,n,a)=>{ +const s=e.contains.findIndex((e=>e.label===n)) +;if(-1===s)throw Error("can not find mode to replace");e.contains.splice(s,1,a) +},b=(i=>{const c=e,o={begin:/<[A-Za-z0-9\\._:-]+/, +end:/\/[A-Za-z0-9\\._:-]+>|\/>/,isTrulyOpeningTag:(e,n)=>{ +const a=e[0].length+e.index,s=e.input[a];"<"!==s?">"===s&&(((e,{after:n})=>{ +const a="", 
+returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{ +begin:i.UNDERSCORE_IDENT_RE,relevance:0},{className:null,begin:/\(\s*\)/,skip:!0 +},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:l,contains:f}]}] +},{begin:/,/,relevance:0},{className:"",begin:/\s/,end:/\s*/,skip:!0},{ +variants:[{begin:"<>",end:""},{begin:o.begin,"on:begin":o.isTrulyOpeningTag, +end:o.end}],subLanguage:"xml",contains:[{begin:o.begin,end:o.end,skip:!0, +contains:["self"]}]}],relevance:0},{className:"function", +beginKeywords:"function",end:/[{;]/,excludeEnd:!0,keywords:l, +contains:["self",i.inherit(i.TITLE_MODE,{begin:c}),A],illegal:/%/},{ +beginKeywords:"while if switch catch for"},{className:"function", +begin:i.UNDERSCORE_IDENT_RE+"\\([^()]*(\\([^()]*(\\([^()]*\\)[^()]*)*\\)[^()]*)*\\)\\s*\\{", +returnBegin:!0,contains:[A,i.inherit(i.TITLE_MODE,{begin:c})]},{variants:[{ +begin:"\\."+c},{begin:"\\$"+c}],relevance:0},{className:"class", +beginKeywords:"class",end:/[{;=]/,excludeEnd:!0,illegal:/[:"[\]]/,contains:[{ +beginKeywords:"extends"},i.UNDERSCORE_TITLE_MODE]},{begin:/\b(?=constructor)/, +end:/[{;]/,excludeEnd:!0,contains:[i.inherit(i.TITLE_MODE,{begin:c}),"self",A] +},{begin:"(get|set)\\s+(?="+c+"\\()",end:/\{/,keywords:"get set", +contains:[i.inherit(i.TITLE_MODE,{begin:c}),{begin:/\(\)/},A]},{begin:/\$[(.]/}] +}})(i) +;return Object.assign(b.keywords,c),b.exports.PARAMS_CONTAINS.push(o),b.contains=b.contains.concat([o,{ +beginKeywords:"namespace",end:/\{/,excludeEnd:!0},{beginKeywords:"interface", +end:/\{/,excludeEnd:!0,keywords:"interface extends" +}]),l(b,"shebang",i.SHEBANG()),l(b,"use_strict",{className:"meta",relevance:10, +begin:/^\s*['"]use strict['"]/ +}),b.contains.find((e=>"function"===e.className)).relevance=0,Object.assign(b,{ +name:"TypeScript",aliases:["ts"]}),b}})()); +hljs.registerLanguage("vbnet",(()=>{"use strict";function e(e){ +return e?"string"==typeof e?e:e.source:null}function n(...n){ +return n.map((n=>e(n))).join("")}function 
t(...n){ +return"("+n.map((n=>e(n))).join("|")+")"}return e=>{ +const a=/\d{1,2}\/\d{1,2}\/\d{4}/,i=/\d{4}-\d{1,2}-\d{1,2}/,s=/(\d|1[012])(:\d+){0,2} *(AM|PM)/,r=/\d{1,2}(:\d{1,2}){1,2}/,o={ +className:"literal",variants:[{begin:n(/# */,t(i,a),/ *#/)},{ +begin:n(/# */,r,/ *#/)},{begin:n(/# */,s,/ *#/)},{ +begin:n(/# */,t(i,a),/ +/,t(s,r),/ *#/)}]},l=e.COMMENT(/'''/,/$/,{contains:[{ +className:"doctag",begin:/<\/?/,end:/>/}]}),c=e.COMMENT(null,/$/,{variants:[{ +begin:/'/},{begin:/([\t ]|^)REM(?=\s)/}]});return{name:"Visual Basic .NET", +aliases:["vb"],case_insensitive:!0,classNameAliases:{label:"symbol"},keywords:{ +keyword:"addhandler alias aggregate ansi as async assembly auto binary by byref byval call case catch class compare const continue custom declare default delegate dim distinct do each equals else elseif end enum erase error event exit explicit finally for friend from function get global goto group handles if implements imports in inherits interface into iterator join key let lib loop me mid module mustinherit mustoverride mybase myclass namespace narrowing new next notinheritable notoverridable of off on operator option optional order overloads overridable overrides paramarray partial preserve private property protected public raiseevent readonly redim removehandler resume return select set shadows shared skip static step stop structure strict sub synclock take text then throw to try unicode until using when where while widening with withevents writeonly yield", +built_in:"addressof and andalso await directcast gettype getxmlnamespace is isfalse isnot istrue like mod nameof new not or orelse trycast typeof xor cbool cbyte cchar cdate cdbl cdec cint clng cobj csbyte cshort csng cstr cuint culng cushort", +type:"boolean byte char date decimal double integer long object sbyte short single string uinteger ulong ushort", +literal:"true false nothing"}, +illegal:"//|\\{|\\}|endif|gosub|variant|wend|^\\$ ",contains:[{ 
+className:"string",begin:/"(""|[^/n])"C\b/},{className:"string",begin:/"/, +end:/"/,illegal:/\n/,contains:[{begin:/""/}]},o,{className:"number",relevance:0, +variants:[{begin:/\b\d[\d_]*((\.[\d_]+(E[+-]?[\d_]+)?)|(E[+-]?[\d_]+))[RFD@!#]?/ +},{begin:/\b\d[\d_]*((U?[SIL])|[%&])?/},{begin:/&H[\dA-F_]+((U?[SIL])|[%&])?/},{ +begin:/&O[0-7_]+((U?[SIL])|[%&])?/},{begin:/&B[01_]+((U?[SIL])|[%&])?/}]},{ +className:"label",begin:/^\w+:/},l,c,{className:"meta", +begin:/[\t ]*#(const|disable|else|elseif|enable|end|externalsource|if|region)\b/, +end:/$/,keywords:{ +"meta-keyword":"const disable else elseif enable end externalsource if region then" +},contains:[c]}]}}})()); +hljs.registerLanguage("yaml",(()=>{"use strict";return e=>{ +var n="true false yes no null",a="[\\w#;/?:@&=+$,.~*'()[\\]]+",s={ +className:"string",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/ +},{begin:/\S+/}],contains:[e.BACKSLASH_ESCAPE,{className:"template-variable", +variants:[{begin:/\{\{/,end:/\}\}/},{begin:/%\{/,end:/\}/}]}]},i=e.inherit(s,{ +variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/[^\s,{}[\]]+/}]}),l={ +end:",",endsWithParent:!0,excludeEnd:!0,keywords:n,relevance:0},t={begin:/\{/, +end:/\}/,contains:[l],illegal:"\\n",relevance:0},g={begin:"\\[",end:"\\]", +contains:[l],illegal:"\\n",relevance:0},b=[{className:"attr",variants:[{ +begin:"\\w[\\w :\\/.-]*:(?=[ \t]|$)"},{begin:'"\\w[\\w :\\/.-]*":(?=[ \t]|$)'},{ +begin:"'\\w[\\w :\\/.-]*':(?=[ \t]|$)"}]},{className:"meta",begin:"^---\\s*$", +relevance:10},{className:"string", +begin:"[\\|>]([1-9]?[+-])?[ ]*\\n( +)[^ ][^\\n]*\\n(\\2[^\\n]+\\n?)*"},{ +begin:"<%[%=-]?",end:"[%-]?%>",subLanguage:"ruby",excludeBegin:!0,excludeEnd:!0, +relevance:0},{className:"type",begin:"!\\w+!"+a},{className:"type", +begin:"!<"+a+">"},{className:"type",begin:"!"+a},{className:"type",begin:"!!"+a +},{className:"meta",begin:"&"+e.UNDERSCORE_IDENT_RE+"$"},{className:"meta", 
+begin:"\\*"+e.UNDERSCORE_IDENT_RE+"$"},{className:"bullet",begin:"-(?=[ ]|$)", +relevance:0},e.HASH_COMMENT_MODE,{beginKeywords:n,keywords:{literal:n}},{ +className:"number", +begin:"\\b[0-9]{4}(-[0-9][0-9]){0,2}([Tt \\t][0-9][0-9]?(:[0-9][0-9]){2})?(\\.[0-9]*)?([ \\t])*(Z|[-+][0-9][0-9]?(:[0-9][0-9])?)?\\b" +},{className:"number",begin:e.C_NUMBER_RE+"\\b",relevance:0},t,g,s],r=[...b] +;return r.pop(),r.push(i),l.contains=r,{name:"YAML",case_insensitive:!0, +aliases:["yml","YAML"],contains:b}}})()); \ No newline at end of file diff --git a/vulnerablecode/static/js/highlight.js-10.6.0.ABOUT b/vulnerablecode/static/js/highlight.js-10.6.0.ABOUT new file mode 100644 index 000000000..a36c4861b --- /dev/null +++ b/vulnerablecode/static/js/highlight.js-10.6.0.ABOUT @@ -0,0 +1,12 @@ +about_resource: highlight-10.6.0.min.js +name: highlight.js +version: 10.6.0 +download_url: https://github.com/highlightjs/highlight.js/archive/10.6.0.zip +description: Syntax highlighting with language autodetection. 
+homepage_url: https://highlightjs.org/ +license_expression: bsd-new +package_url: pkg:npm/highlight.js@10.6.0 +licenses: + - key: bsd-new + name: BSD-3-Clause + file: bsd-new.LICENSE From ba3879fac64dadfdfcb07aac8c604d3130cf1e7f Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:49:59 +0530 Subject: [PATCH 107/545] Add pipeline run list view Signed-off-by: Keshav Priyadarshi --- .../templates/pipeline_run_list.html | 126 ++++++++++++++++++ vulnerabilities/views.py | 25 ++++ 2 files changed, 151 insertions(+) create mode 100644 vulnerabilities/templates/pipeline_run_list.html diff --git a/vulnerabilities/templates/pipeline_run_list.html b/vulnerabilities/templates/pipeline_run_list.html new file mode 100644 index 000000000..4a7d59dda --- /dev/null +++ b/vulnerabilities/templates/pipeline_run_list.html @@ -0,0 +1,126 @@ +{% extends "base.html" %} + +{% block title %} +{{ pipeline_name }} Runs +{% endblock %} + +{% block extrahead %} + +{% endblock %} + + +{% block content %} +
    +
    +
    + +
    +
    +

    {{ pipeline_name }} Runs

    +
    +
    + + {% if is_paginated %} + + {% endif %} +
    +
    +
    +{% endblock %} \ No newline at end of file diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index d3b842e95..06fa4e93e 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -365,3 +365,28 @@ def get_queryset(self): pipeline_id__icontains=form.cleaned_data.get("search") ) return PipelineSchedule.objects.all() + + +class PipelineRunListView(ListView): + model = PipelineRun + context_object_name = "run_list" + template_name = "pipeline_run_list.html" + paginate_by = 30 + slug_url_kwarg = "pipeline_id" + slug_field = "pipeline_id" + + def get_queryset(self): + pipeline = get_object_or_404( + PipelineSchedule, + pipeline_id=self.kwargs["pipeline_id"], + ) + return pipeline.all_runs + + def get_context_data(self, **kwargs): + context = super().get_context_data(**kwargs) + pipeline = get_object_or_404( + PipelineSchedule, + pipeline_id=self.kwargs["pipeline_id"], + ) + context["pipeline_name"] = pipeline.pipeline_class.__name__ + return context From e8b1cda8467dc573dca9d2dd2a5a5b90118b76b2 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 5 May 2025 23:50:57 +0530 Subject: [PATCH 108/545] Add detail view for pipeline run Signed-off-by: Keshav Priyadarshi --- .../0092_pipelineschedule_pipelinerun.py | 104 ++++++++++ .../templates/pipeline_run_details.html | 178 ++++++++++++++++++ vulnerabilities/templatetags/show_cvss.py | 10 + vulnerabilities/templatetags/url_filters.py | 11 +- vulnerabilities/templatetags/utils.py | 20 ++ vulnerabilities/views.py | 27 +++ vulnerablecode/urls.py | 20 ++ 7 files changed, 369 insertions(+), 1 deletion(-) create mode 100644 vulnerabilities/migrations/0092_pipelineschedule_pipelinerun.py create mode 100644 vulnerabilities/templates/pipeline_run_details.html create mode 100644 vulnerabilities/templatetags/utils.py diff --git a/vulnerabilities/migrations/0092_pipelineschedule_pipelinerun.py b/vulnerabilities/migrations/0092_pipelineschedule_pipelinerun.py new file mode 100644 index 
000000000..5e254dc88 --- /dev/null +++ b/vulnerabilities/migrations/0092_pipelineschedule_pipelinerun.py @@ -0,0 +1,104 @@ +# Generated by Django 4.2.20 on 2025-05-01 12:53 + +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0091_alter_advisory_unique_together_and_more"), + ] + + operations = [ + migrations.CreateModel( + name="PipelineSchedule", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "pipeline_id", + models.CharField( + help_text="Identify a registered Pipeline class.", + max_length=600, + unique=True, + ), + ), + ( + "is_active", + models.BooleanField( + db_index=True, + default=True, + help_text="When set to True (Yes), this Pipeline is active. When set to False (No), this Pipeline is inactive and not run.", + null=True, + ), + ), + ( + "run_interval", + models.PositiveSmallIntegerField( + default=1, + help_text="Number of days to wait between run of this pipeline.", + validators=[ + django.core.validators.MinValueValidator( + 1, message="Interval must be at least 1 day." + ), + django.core.validators.MaxValueValidator( + 365, message="Interval must be at most 365 days." 
+ ), + ], + ), + ), + ( + "schedule_work_id", + models.CharField( + blank=True, + db_index=True, + help_text="Identifier used to manage the periodic run job.", + max_length=255, + null=True, + unique=True, + ), + ), + ("created_date", models.DateTimeField(auto_now_add=True, db_index=True)), + ], + options={ + "ordering": ["-created_date"], + }, + ), + migrations.CreateModel( + name="PipelineRun", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("run_id", models.CharField(blank=True, editable=False, null=True)), + ("run_start_date", models.DateTimeField(blank=True, editable=False, null=True)), + ("run_end_date", models.DateTimeField(blank=True, editable=False, null=True)), + ("run_exitcode", models.IntegerField(blank=True, editable=False, null=True)), + ("run_output", models.TextField(blank=True, editable=False)), + ("created_date", models.DateTimeField(auto_now_add=True, db_index=True)), + ("vulnerablecode_version", models.CharField(blank=True, max_length=100, null=True)), + ("vulnerablecode_commit", models.CharField(blank=True, max_length=300, null=True)), + ("log", models.TextField(blank=True, editable=False)), + ( + "pipeline", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="pipelineruns", + to="vulnerabilities.pipelineschedule", + ), + ), + ], + options={ + "ordering": ["-created_date"], + }, + ), + ] diff --git a/vulnerabilities/templates/pipeline_run_details.html b/vulnerabilities/templates/pipeline_run_details.html new file mode 100644 index 000000000..ea7e9a3db --- /dev/null +++ b/vulnerabilities/templates/pipeline_run_details.html @@ -0,0 +1,178 @@ +{% extends "base.html" %} +{% load static %} +{% load utils %} + +{% block title %}Run Log{% endblock %} + +{% block extrahead %} + + + + +{% endblock %} + +{% block content %} +
    +
    +

    {{ pipeline_name }} Run Log

    +
    + +
    +
    +
    +

    Pipeline ID

    +

    {{ run.pipeline.pipeline_id }}

    +
    +
    +

    Status

    +

    + {% if run.status == "running" %} + Running + {% elif run.status == "success" %} + Success + {% elif run.status == "failure" %} + Failure + {% elif run.status == "scheduled" %} + Scheduled + {% else %} + Unknown + {% endif %} +

    +
    +
    +

    Execution Time

    +

    {{ run.execution_time }}

    +
    +
    +

    Exit Code

    +

    {{ run.run_exitcode }}

    +
    +
    +

    Start

    +

    {{ run.run_start_date }}

    +
    +
    +

    End

    +

    {{ run.run_end_date }}

    +
    +
    +

    Created

    +

    {{ run.created_date }}

    +
    +
    +

    Version

    +

    {{ run.vulnerablecode_version }}

    +
    +
    +

    Commit

    +

    + {% if run.vulnerablecode_commit %} + + {{ run.vulnerablecode_commit }} + + + {% endif %} + +

    +
    +
    +

    Job ID

    +

    {{ run.run_id }}

    +
    +
    +
    + + + {% if run.run_output|strip %} +
    +

    Run Error

    +
    + +
    {{ run.run_output }}
    +
    +
    + {% endif %} + + {% if run.log|strip %} +
    +

    Log Output

    +
    + +
    {{ run.log }}
    +
    +
    + {% endif %} + + + ← Back to All + Runs +
    +
    +{% endblock %} + + +{% block scripts %} + + + + + +{% endblock %} \ No newline at end of file diff --git a/vulnerabilities/templatetags/show_cvss.py b/vulnerabilities/templatetags/show_cvss.py index 52533d0f2..8d14ac9d7 100644 --- a/vulnerabilities/templatetags/show_cvss.py +++ b/vulnerabilities/templatetags/show_cvss.py @@ -1,3 +1,13 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + + from django import template from django.utils.safestring import mark_safe diff --git a/vulnerabilities/templatetags/url_filters.py b/vulnerabilities/templatetags/url_filters.py index a6dda1dd8..ff38210af 100644 --- a/vulnerabilities/templatetags/url_filters.py +++ b/vulnerabilities/templatetags/url_filters.py @@ -1,6 +1,15 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + + from urllib.parse import quote -import packageurl from django import template register = template.Library() diff --git a/vulnerabilities/templatetags/utils.py b/vulnerabilities/templatetags/utils.py new file mode 100644 index 000000000..4423cc977 --- /dev/null +++ b/vulnerabilities/templatetags/utils.py @@ -0,0 +1,20 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. 
+# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + + +from django import template + +register = template.Library() + + +@register.filter +def strip(value): + if isinstance(value, str): + return value.strip() + return value diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 06fa4e93e..942f26385 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -390,3 +390,30 @@ def get_context_data(self, **kwargs): ) context["pipeline_name"] = pipeline.pipeline_class.__name__ return context + + +class PipelineRunDetailView(DetailView): + model = PipelineRun + template_name = "pipeline_run_details.html" + context_object_name = "run" + + def get_object(self): + pipeline_id = self.kwargs["pipeline_id"] + run_id = self.kwargs["run_id"] + return get_object_or_404( + PipelineRun, + pipeline__pipeline_id=pipeline_id, + run_id=run_id, + ) + + def get_context_data(self, **kwargs): + context = super().get_context_data(**kwargs) + pipeline_id = self.kwargs["pipeline_id"] + run_id = self.kwargs["run_id"] + run = get_object_or_404( + PipelineRun, + pipeline__pipeline_id=pipeline_id, + run_id=run_id, + ) + context["pipeline_name"] = run.pipeline_class.__name__ + return context diff --git a/vulnerablecode/urls.py b/vulnerablecode/urls.py index c6dd3da44..c64192054 100644 --- a/vulnerablecode/urls.py +++ b/vulnerablecode/urls.py @@ -22,14 +22,19 @@ from vulnerabilities.api import VulnerabilityViewSet from vulnerabilities.api_v2 import CodeFixViewSet from vulnerabilities.api_v2 import PackageV2ViewSet +from vulnerabilities.api_v2 import PipelineScheduleV2ViewSet from vulnerabilities.api_v2 import VulnerabilityV2ViewSet from vulnerabilities.views import ApiUserCreateView from vulnerabilities.views import HomePage from 
vulnerabilities.views import PackageDetails from vulnerabilities.views import PackageSearch +from vulnerabilities.views import PipelineRunDetailView +from vulnerabilities.views import PipelineRunListView +from vulnerabilities.views import PipelineScheduleListView from vulnerabilities.views import VulnerabilityDetails from vulnerabilities.views import VulnerabilityPackagesDetails from vulnerabilities.views import VulnerabilitySearch +from vulnerablecode.settings import DEBUG from vulnerablecode.settings import DEBUG_TOOLBAR @@ -51,6 +56,7 @@ def __init__(self, *args, **kwargs): api_v2_router.register("packages", PackageV2ViewSet, basename="package-v2") api_v2_router.register("vulnerabilities", VulnerabilityV2ViewSet, basename="vulnerability-v2") api_v2_router.register("codefixes", CodeFixViewSet, basename="codefix") +api_v2_router.register("schedule", PipelineScheduleV2ViewSet, basename="schedule") urlpatterns = [ @@ -64,6 +70,17 @@ def __init__(self, *args, **kwargs): HomePage.as_view(), name="home", ), + path("pipelines/schedule", PipelineScheduleListView.as_view(), name="schedule"), + path( + "pipelines//runs/", + PipelineRunListView.as_view(), + name="runs-list", + ), + path( + "pipelines//run//", + PipelineRunDetailView.as_view(), + name="run-details", + ), path( "packages/search", PackageSearch.as_view(), @@ -120,6 +137,9 @@ def __init__(self, *args, **kwargs): ), ] +if DEBUG: + urlpatterns += [path("django-rq/", include("django_rq.urls"))] + if DEBUG_TOOLBAR: urlpatterns += [ path( From d616ce6f832525943c6f6a1518785415e06f957e Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 6 May 2025 17:24:51 +0530 Subject: [PATCH 109/545] Show default values for empty fields Signed-off-by: Keshav Priyadarshi --- vulnerabilities/templates/pipeline_run_details.html | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/vulnerabilities/templates/pipeline_run_details.html b/vulnerabilities/templates/pipeline_run_details.html index 
ea7e9a3db..8ce65d004 100644 --- a/vulnerabilities/templates/pipeline_run_details.html +++ b/vulnerabilities/templates/pipeline_run_details.html @@ -73,11 +73,11 @@

    {{ pipeline_name }} Run Log

    Execution Time

    -

    {{ run.execution_time }}

    +

    {{ run.execution_time|default_if_none:"N/A" }}

    Exit Code

    -

    {{ run.run_exitcode }}

    +

    {{ run.run_exitcode|default_if_none:"N/A" }}

    Start

    @@ -85,7 +85,7 @@

    {{ pipeline_name }} Run Log

    End

    -

    {{ run.run_end_date }}

    +

    {{ run.run_end_date|default_if_none:"N/A" }}

    Created

    From 0ba0549482c4eb92202890d614ca5c7d5484c3ee Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 7 May 2025 14:03:15 +0530 Subject: [PATCH 110/545] Enable datetime localization for client - Detect and store client timezone in cookies - Add UserTimezoneMiddleware to activate localization based on cookies Signed-off-by: Keshav Priyadarshi --- vulnerabilities/middleware/timezone.py | 30 ++++++++++++++++++++++++++ vulnerabilities/templates/base.html | 8 ++++++- vulnerablecode/settings.py | 1 + 3 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 vulnerabilities/middleware/timezone.py diff --git a/vulnerabilities/middleware/timezone.py b/vulnerabilities/middleware/timezone.py new file mode 100644 index 000000000..93c7c5eaa --- /dev/null +++ b/vulnerabilities/middleware/timezone.py @@ -0,0 +1,30 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +import zoneinfo + +from django.utils import timezone + + +class UserTimezoneMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + try: + # Activate local timezone for user using cookies + tzname = request.COOKIES.get("user_timezone") + if tzname: + timezone.activate(zoneinfo.ZoneInfo(tzname)) + else: + timezone.deactivate() + except Exception as e: + timezone.deactivate() + + return self.get_response(request) diff --git a/vulnerabilities/templates/base.html b/vulnerabilities/templates/base.html index f86e68715..337e9318d 100644 --- a/vulnerabilities/templates/base.html +++ b/vulnerabilities/templates/base.html @@ -22,7 +22,13 @@ {% block content %}{% endblock %} {% include "footer.html" %}
    - + {% block scripts %} {% endblock %} diff --git a/vulnerablecode/settings.py b/vulnerablecode/settings.py index 451e7e51c..7179bba85 100644 --- a/vulnerablecode/settings.py +++ b/vulnerablecode/settings.py @@ -102,6 +102,7 @@ "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", "vulnerabilities.middleware.ban_user_agent.BanUserAgent", + "vulnerabilities.middleware.timezone.UserTimezoneMiddleware", ) ROOT_URLCONF = "vulnerablecode.urls" From 34dcd72e4d4bb93055a41632f13506bd54237b79 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 9 May 2025 23:03:17 +0530 Subject: [PATCH 111/545] Allow temporary copy of .git to extract commit hash Signed-off-by: Keshav Priyadarshi --- .dockerignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.dockerignore b/.dockerignore index 2f5f4d10b..41a5983c5 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,7 +6,6 @@ docker-compose.yml # Ignore Git directory and files and github directory. 
-**/.git **/.gitignore **/.gitattributes **/.gitmodules From 1936d9fc80dd384acc49957acc9d5a31552d3cb1 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 12 May 2025 20:46:54 +0530 Subject: [PATCH 112/545] Populate tag and commit on pulling git archive Signed-off-by: Keshav Priyadarshi --- .VERSION | 2 +- .gitattributes | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 .gitattributes diff --git a/.VERSION b/.VERSION index 008a20e06..0b48d2267 100644 --- a/.VERSION +++ b/.VERSION @@ -1,3 +1,3 @@ refs=$Format:%D$ -commit=$Format:%H$ +commit=$Format:%h$ abbrev_commit=$Format:%H$ diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..8b50a1453 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +.VERSION export-subst \ No newline at end of file From 354c605ac6c50d377fc58ef2ae17c2109a22a62c Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 12 May 2025 22:27:45 +0530 Subject: [PATCH 113/545] Extract commit hash from git archive and local docker deployment Signed-off-by: Keshav Priyadarshi --- Dockerfile | 7 +++++++ vulnerablecode/__init__.py | 29 +++++++++++++++++++++++++++-- 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 0a193d7c0..1bd35b926 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,3 +28,10 @@ COPY setup.cfg setup.py requirements.txt pyproject.toml /app/ RUN pip install . -c requirements.txt COPY . /app + +# Store commit hash for docker deployment from local checkout. 
+RUN if [ -d ".git" ]; then \ + GIT_COMMIT=$(git rev-parse --short HEAD) && \ + echo "VULNERABLECODE_GIT_COMMIT=\"$GIT_COMMIT\"" >> /app/vulnerablecode/settings.py; \ + rm -rf .git; \ +fi diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index 1b552cdda..6b4e51bfd 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -30,11 +30,36 @@ def get_git_describe_from_local_checkout(): return git.Repo(".").git.describe(tags=True, always=True) +def get_git_commit_from_version_file(): + """ + Return the git commit from the ".VERSION" file. + This will only provide a result when the codebase is an extracted git archive. + """ + version_file = ROOT_DIR / ".VERSION" + if not version_file.exists(): + return + + try: + lines = version_file.read_text().splitlines() + commit_line = lines[1] + if not commit_line.startswith("commit=") or commit_line.startswith("commit=$Format"): + return + return commit_line.replace("commit=", "") + except (UnicodeDecodeError): + return + + def get_short_commit(): """ - Return the short commit hash from a Git describe string while removing - any leading "g" character if present. + Return the short commit hash from the .VERSION file or from `git describe` + in a local checkout or docker deployment using a local checkout. 
""" + from vulnerablecode import settings + + if short_commit := get_git_commit_from_version_file(): + return short_commit + if hasattr(settings, "VULNERABLECODE_GIT_COMMIT"): + return settings.VULNERABLECODE_GIT_COMMIT if git_describe := get_git_describe_from_local_checkout(): short_commit = git_describe.split("-")[-1] return short_commit.lstrip("g") From ae1a260a72656f5a22922e0eed8d08d667a4c389 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 12 May 2025 22:36:41 +0530 Subject: [PATCH 114/545] Use uuid to track pipeline job id Signed-off-by: Keshav Priyadarshi --- .../0092_pipelineschedule_pipelinerun.py | 14 ++++++---- vulnerabilities/models.py | 28 +++++++++++++------ vulnerablecode/urls.py | 2 +- 3 files changed, 30 insertions(+), 14 deletions(-) diff --git a/vulnerabilities/migrations/0092_pipelineschedule_pipelinerun.py b/vulnerabilities/migrations/0092_pipelineschedule_pipelinerun.py index 5e254dc88..e029596d8 100644 --- a/vulnerabilities/migrations/0092_pipelineschedule_pipelinerun.py +++ b/vulnerabilities/migrations/0092_pipelineschedule_pipelinerun.py @@ -1,8 +1,9 @@ -# Generated by Django 4.2.20 on 2025-05-01 12:53 +# Generated by Django 4.2.20 on 2025-05-12 17:04 import django.core.validators from django.db import migrations, models import django.db.models.deletion +import uuid class Migration(migrations.Migration): @@ -74,12 +75,15 @@ class Migration(migrations.Migration): name="PipelineRun", fields=[ ( - "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + "run_id", + models.UUIDField( + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + unique=True, ), ), - ("run_id", models.CharField(blank=True, editable=False, null=True)), ("run_start_date", models.DateTimeField(blank=True, editable=False, null=True)), ("run_end_date", models.DateTimeField(blank=True, editable=False, null=True)), ("run_exitcode", models.IntegerField(blank=True, editable=False, null=True)), 
diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 430f85cba..5ba20d0e2 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -10,6 +10,7 @@ import csv import datetime import logging +import uuid import xml.etree.ElementTree as ET from contextlib import suppress from functools import cached_property @@ -1828,11 +1829,14 @@ class PipelineRun(models.Model): related_name="pipelineruns", on_delete=models.CASCADE, ) - run_id = models.CharField( - blank=True, - null=True, + + run_id = models.UUIDField( + primary_key=True, + default=uuid.uuid4, editable=False, + unique=True, ) + run_start_date = models.DateTimeField( blank=True, null=True, @@ -1880,6 +1884,7 @@ class Status(models.TextChoices): SUCCESS = "success" FAILURE = "failure" STOPPED = "stopped" + QUEUED = "queued" STALE = "stale" @property @@ -1902,6 +1907,9 @@ def status(self): elif self.run_start_date: return status.RUNNING + elif self.created_date: + return status.QUEUED + return status.UNKNOWN @property @@ -1982,8 +1990,10 @@ def set_run_stopped(self): self.set_run_ended(exitcode=99) def stop_run(self): - self.append_to_log("Stop run requested") + if self.run_succeeded: + return + self.append_to_log("Stop run requested") if not VULNERABLECODE_ASYNC: self.set_run_stopped() return @@ -2084,7 +2094,9 @@ def __str__(self): return f"{self.pipeline_id}" def save(self, *args, **kwargs): - if self.pk and (existing := PipelineSchedule.objects.get(pk=self.pk)): + if not self.pk: + self.schedule_work_id = self.create_new_job(execute_now=True) + elif self.pk and (existing := PipelineSchedule.objects.get(pk=self.pk)): if existing.is_active != self.is_active or existing.run_interval != self.run_interval: self.schedule_work_id = self.create_new_job() self.full_clean() @@ -2116,7 +2128,7 @@ def earliest_run(self): @property def latest_run_date(self): - return self.latest_run.created_date if self.latest_run else None + return self.latest_run.run_start_date if self.latest_run else None 
@property def next_run_date(self): @@ -2139,7 +2151,7 @@ def status(self): if self.latest_run: return self.latest_run.status - def create_new_job(self): + def create_new_job(self, execute_now=False): """ Create a new scheduled job. If a previous scheduled job exists remove the existing job from the scheduler. @@ -2151,4 +2163,4 @@ def create_new_job(self): if self.schedule_work_id: schedules.clear_job(self.schedule_work_id) - return schedules.schedule_execution(self) if self.is_active else None + return schedules.schedule_execution(self, execute_now) if self.is_active else None diff --git a/vulnerablecode/urls.py b/vulnerablecode/urls.py index c64192054..7e202a720 100644 --- a/vulnerablecode/urls.py +++ b/vulnerablecode/urls.py @@ -77,7 +77,7 @@ def __init__(self, *args, **kwargs): name="runs-list", ), path( - "pipelines//run//", + "pipelines//run//", PipelineRunDetailView.as_view(), name="run-details", ), From a96f77520e4bf889580596f1fb866434c077f31c Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 12 May 2025 22:45:28 +0530 Subject: [PATCH 115/545] Use scheduler to explicitly queue pipeline execution jobs Signed-off-by: Keshav Priyadarshi --- .../management/commands/run_scheduler.py | 4 ++- vulnerabilities/schedules.py | 17 +++++----- vulnerabilities/tasks.py | 31 +++++++++++++------ 3 files changed, 35 insertions(+), 17 deletions(-) diff --git a/vulnerabilities/management/commands/run_scheduler.py b/vulnerabilities/management/commands/run_scheduler.py index 108264da8..f7583ed41 100644 --- a/vulnerabilities/management/commands/run_scheduler.py +++ b/vulnerabilities/management/commands/run_scheduler.py @@ -18,7 +18,9 @@ def init_pipeline_scheduled(): """Initialize schedule jobs for active PipelineSchedule.""" - active_pipeline_qs = models.PipelineSchedule.objects.filter(is_active=True) + active_pipeline_qs = models.PipelineSchedule.objects.filter(is_active=True).order_by( + "created_date" + ) for pipeline_schedule in active_pipeline_qs: if 
scheduled_job_exists(pipeline_schedule.schedule_work_id): continue diff --git a/vulnerabilities/schedules.py b/vulnerabilities/schedules.py index edd56540a..d2a62e7f5 100644 --- a/vulnerabilities/schedules.py +++ b/vulnerabilities/schedules.py @@ -7,34 +7,37 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +import datetime import logging import django_rq from redis.exceptions import ConnectionError -from vulnerabilities.tasks import execute_pipeline +from vulnerabilities.tasks import enqueue_pipeline from vulnerablecode.settings import VULNERABLECODE_PIPELINE_TIMEOUT log = logging.getLogger(__name__) scheduler = django_rq.get_scheduler() -def schedule_execution(pipeline_schedule): +def schedule_execution(pipeline_schedule, execute_now=False): """ Takes a `PackageSchedule` object as input and schedule a recurring job using `rq_scheduler` to execute the pipeline. """ - first_execution = pipeline_schedule.next_run_date + first_execution = datetime.datetime.now(tz=datetime.timezone.utc) + if not execute_now: + first_execution = pipeline_schedule.next_run_date + interval_in_seconds = pipeline_schedule.run_interval * 24 * 60 * 60 job = scheduler.schedule( scheduled_time=first_execution, - func=execute_pipeline, + func=enqueue_pipeline, args=[pipeline_schedule.pipeline_id], interval=interval_in_seconds, - result_ttl=interval_in_seconds, # Remove job results after next run timeout=VULNERABLECODE_PIPELINE_TIMEOUT, - repeat=None, # None for repeat forever + repeat=None, ) return job._id @@ -89,6 +92,6 @@ def update_pipeline_schedule(): from vulnerabilities.models import PipelineSchedule pipeline_ids = [*IMPORTERS_REGISTRY.keys(), *IMPROVERS_REGISTRY.keys()] - # pipeline_ids = ["nvd_importer", "vulnerabilities.importers.curl.CurlImporter"] + PipelineSchedule.objects.exclude(pipeline_id__in=pipeline_ids).delete() [PipelineSchedule.objects.get_or_create(pipeline_id=id) for id in pipeline_ids] diff --git a/vulnerabilities/tasks.py 
b/vulnerabilities/tasks.py index 623f00a5f..bc0e9479c 100644 --- a/vulnerabilities/tasks.py +++ b/vulnerabilities/tasks.py @@ -12,28 +12,26 @@ from io import StringIO from traceback import format_exc as traceback_format_exc -from rq import get_current_job +import django_rq from vulnerabilities import models from vulnerabilities.importer import Importer from vulnerabilities.improver import Improver +from vulnerablecode.settings import VULNERABLECODE_PIPELINE_TIMEOUT logger = logging.getLogger(__name__) +queue = django_rq.get_queue("default") -def execute_pipeline(pipeline_id): + +def execute_pipeline(pipeline_id, run_id): from vulnerabilities.pipelines import VulnerableCodePipeline logger.info(f"Enter `execute_pipeline` {pipeline_id}") - pipeline_schedule = models.PipelineSchedule.objects.get(pipeline_id=pipeline_id) - job = get_current_job() - - run = models.PipelineRun.objects.create( - pipeline=pipeline_schedule, - run_id=job.id, + run = models.PipelineRun.objects.get( + run_id=run_id, ) - run.set_vulnerablecode_version_and_commit() run.set_run_started() @@ -104,3 +102,18 @@ def set_run_failure(job, connection, type, value, traceback): return run.set_run_ended(exitcode=1, output=f"value={value} trace={traceback}") + + +def enqueue_pipeline(pipeline_id): + pipeline_schedule = models.PipelineSchedule.objects.get(pipeline_id=pipeline_id) + run = models.PipelineRun.objects.create( + pipeline=pipeline_schedule, + ) + job = queue.enqueue( + execute_pipeline, + pipeline_id, + run.run_id, + job_id=str(run.run_id), + on_failure=set_run_failure, + job_timeout=VULNERABLECODE_PIPELINE_TIMEOUT, + ) From 279bd061e02feb5eac4afd65251a3ab68905480b Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 13 May 2025 14:57:13 +0530 Subject: [PATCH 116/545] Handle the stats for queued pipeline Signed-off-by: Keshav Priyadarshi --- .../templates/pipeline_run_details.html | 14 +++--- .../templates/pipeline_run_list.html | 46 ++++++++++-------- 
.../templates/pipeline_schedule_list.html | 47 ++++++++++--------- 3 files changed, 58 insertions(+), 49 deletions(-) diff --git a/vulnerabilities/templates/pipeline_run_details.html b/vulnerabilities/templates/pipeline_run_details.html index 8ce65d004..014afa1cc 100644 --- a/vulnerabilities/templates/pipeline_run_details.html +++ b/vulnerabilities/templates/pipeline_run_details.html @@ -59,15 +59,15 @@

    {{ pipeline_name }} Run Log

    Status

    {% if run.status == "running" %} - Running + Running {% elif run.status == "success" %} - Success + Success {% elif run.status == "failure" %} - Failure - {% elif run.status == "scheduled" %} - Scheduled + Failure + {% elif run.status == "queued" %} + Queued {% else %} - Unknown + Unknown {% endif %}

    @@ -81,7 +81,7 @@

    {{ pipeline_name }} Run Log

    Start

    -

    {{ run.run_start_date }}

    +

    {{ run.run_start_date|default:"N/A" }}

    End

    diff --git a/vulnerabilities/templates/pipeline_run_list.html b/vulnerabilities/templates/pipeline_run_list.html index 4a7d59dda..80d8af0ce 100644 --- a/vulnerabilities/templates/pipeline_run_list.html +++ b/vulnerabilities/templates/pipeline_run_list.html @@ -6,7 +6,6 @@ {% block extrahead %} - - {% endblock %} {% block bodyclass %}{{ block.super }} login{% endblock %} From 30919e312117cab71269d53bef5b5a34dd3d7d62 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Mon, 23 Jun 2025 17:58:26 +0530 Subject: [PATCH 188/545] Replace reCAPTCHA with Altcha on API signup page Signed-off-by: Keshav Priyadarshi --- requirements.txt | 2 +- setup.cfg | 2 - vulnerabilities/forms.py | 14 ++---- vulnerabilities/templates/admin_login.html | 6 +-- .../templates/api_user_creation_form.html | 49 +++++++++++++------ vulnerablecode/settings.py | 11 ----- 6 files changed, 40 insertions(+), 44 deletions(-) diff --git a/requirements.txt b/requirements.txt index 357b6fe30..44bd810d2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -28,10 +28,10 @@ decorator==5.1.1 defusedxml==0.7.1 distro==1.7.0 Django==4.2.22 +django-altcha==0.2.0 django-crispy-forms==2.3 django-environ==0.11.2 django-filter==24.3 -django-recaptcha==4.0.0 django-widget-tweaks==1.5.0 djangorestframework==3.15.2 doc8==0.11.1 diff --git a/setup.cfg b/setup.cfg index 5b2c5927e..f6b529e8c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -102,8 +102,6 @@ install_requires = python-dotenv texttable - django-recaptcha>=4.0.0 - [options.extras_require] dev = diff --git a/vulnerabilities/forms.py b/vulnerabilities/forms.py index ebce9e953..d980a5d9d 100644 --- a/vulnerabilities/forms.py +++ b/vulnerabilities/forms.py @@ -11,8 +11,6 @@ from django.contrib.admin.forms import AdminAuthenticationForm from django.core.validators import validate_email from django_altcha import AltchaField -from django_recaptcha.fields import ReCaptchaField -from django_recaptcha.widgets import ReCaptchaV2Checkbox from vulnerabilities.models 
import ApiUser @@ -46,13 +44,9 @@ class AdvisorySearchForm(forms.Form): class ApiUserCreationForm(forms.ModelForm): - """ - Support a simplified creation for API-only users directly from the UI. - """ + """Support a simplified creation for API-only users directly from the UI.""" - captcha = ReCaptchaField( - error_messages={"required": ("Captcha is required")}, widget=ReCaptchaV2Checkbox - ) + captcha = AltchaField(floating=True) class Meta: model = ApiUser @@ -111,6 +105,4 @@ class PipelineSchedulePackageForm(forms.Form): class AdminLoginForm(AdminAuthenticationForm): - captcha = AltchaField( - floating=True, - ) + captcha = AltchaField(floating=True) diff --git a/vulnerabilities/templates/admin_login.html b/vulnerabilities/templates/admin_login.html index 58f137793..a656ab058 100644 --- a/vulnerabilities/templates/admin_login.html +++ b/vulnerabilities/templates/admin_login.html @@ -89,10 +89,8 @@ {% translate 'Forgotten your password or username?' %}
    {% endif %} -
    -
    - {{ form.captcha }} -
    +
    + {{ form.captcha }}
    diff --git a/vulnerabilities/templates/api_user_creation_form.html b/vulnerabilities/templates/api_user_creation_form.html index 4c596f094..5a913f5ae 100644 --- a/vulnerabilities/templates/api_user_creation_form.html +++ b/vulnerabilities/templates/api_user_creation_form.html @@ -26,11 +26,13 @@
    {% endif %}
    -

    - VulnerableCode API key request -

    -
    +
    +

    VulnerableCode API Key Request

    +
    +
    + +

    You need an API key to access the VulnerableCode JSON REST API. Please check the live OpenAPI documentation @@ -41,17 +43,34 @@


    -
    - {% csrf_token %} - {% for field in form %} -
    - -
    - {{ field }} -
    +
    +
    + + {% csrf_token %} +
    +
    +
    - {% endfor %} - - +
    +
    +
    + +
    +
    +
    +
    + +
    +
    +
    + {{ form.captcha }} +
    + + +
    +
    {% endblock %} diff --git a/vulnerablecode/settings.py b/vulnerablecode/settings.py index 735e01a82..3111c9523 100644 --- a/vulnerablecode/settings.py +++ b/vulnerablecode/settings.py @@ -83,21 +83,10 @@ "drf_spectacular", # required for Django collectstatic discovery "drf_spectacular_sidecar", - "django_recaptcha", "django_rq", "django_altcha", ) -if env.str("RECAPTCHA_PUBLIC_KEY", None): - RECAPTCHA_PUBLIC_KEY = env.str("RECAPTCHA_PUBLIC_KEY") - -if env.str("RECAPTCHA_PRIVATE_KEY", None): - RECAPTCHA_PRIVATE_KEY = env.str("RECAPTCHA_PRIVATE_KEY") - -SILENCED_SYSTEM_CHECKS = ["django_recaptcha.recaptcha_test_key_error"] -SILENCED_SYSTEM_CHECKS = ["django_recaptcha.recaptcha_test_key_error"] -RECAPTCHA_DOMAIN = env.str("RECAPTCHA_DOMAIN", "www.recaptcha.net") - MIDDLEWARE = ( "django.middleware.security.SecurityMiddleware", From e1a028fc690a3ce00244db418659b459a97e2c03 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 24 Jun 2025 21:52:41 +0530 Subject: [PATCH 189/545] Add top navigation button to pipeline dashboard Signed-off-by: Keshav Priyadarshi --- vulnerabilities/forms.py | 12 ++++++++++-- .../templates/pipeline_run_details.html | 14 ++++++++------ vulnerabilities/templates/pipeline_run_list.html | 5 ++++- 3 files changed, 22 insertions(+), 9 deletions(-) diff --git a/vulnerabilities/forms.py b/vulnerabilities/forms.py index d980a5d9d..7ee348354 100644 --- a/vulnerabilities/forms.py +++ b/vulnerabilities/forms.py @@ -46,7 +46,11 @@ class AdvisorySearchForm(forms.Form): class ApiUserCreationForm(forms.ModelForm): """Support a simplified creation for API-only users directly from the UI.""" - captcha = AltchaField(floating=True) + captcha = AltchaField( + floating=True, + hidefooter=True, + hidelogo=True, + ) class Meta: model = ApiUser @@ -105,4 +109,8 @@ class PipelineSchedulePackageForm(forms.Form): class AdminLoginForm(AdminAuthenticationForm): - captcha = AltchaField(floating=True) + captcha = AltchaField( + floating=True, + hidefooter=True, 
+ hidelogo=True, + ) diff --git a/vulnerabilities/templates/pipeline_run_details.html b/vulnerabilities/templates/pipeline_run_details.html index 6ebf92807..8dcaba959 100644 --- a/vulnerabilities/templates/pipeline_run_details.html +++ b/vulnerabilities/templates/pipeline_run_details.html @@ -34,8 +34,10 @@ {% endblock %} {% block content %} -
    + + Back to All Runs +

    {{ pipeline_name }} Run Log


    @@ -181,12 +183,12 @@

    Log Output

    {% endif %} - - - Back to All Runs - + {% if run.run_output or run.log %} + + Back to All Runs + + {% endif %} -
    {% endblock %} diff --git a/vulnerabilities/templates/pipeline_run_list.html b/vulnerabilities/templates/pipeline_run_list.html index 522e78cd6..f11eca0b8 100644 --- a/vulnerabilities/templates/pipeline_run_list.html +++ b/vulnerabilities/templates/pipeline_run_list.html @@ -34,6 +34,7 @@
    + Back to Dashboard

    {{ pipeline_name }} Runs


    @@ -109,7 +110,9 @@

    {{ pipeline_name }} Runs

    {% endfor %} - Back to Dashboard + {% if run_list|length > 10 %} + Back to Dashboard + {% endif %}
    {% if is_paginated %}

  • -
  • - - - Severity details ({{ severity_vectors|length }}) - - -
  • {% if advisory.exploits %}
  • @@ -70,6 +63,16 @@
  • + {% if ssvcs %} +
  • + + + Related SSVCS ({{ ssvcs|length }}) + + +
  • + {% endif %} + + + + +
    +
    +
    + {% if affected_by_advisories_v2|length != 0 or affected_by_advisories_v2_url %} +
    + {% else %} +
    + {% endif %} + + + + + + + {% if package.is_ghost %} + + + + + {% endif %} + +
    + + purl + + + {{ package.purl }} +
    + Tags + + + Ghost + +
    +
    + {% if affected_by_advisories_v2|length != 0 or affected_by_advisories_v2_url %} + +
    + + + + + + + + + + + + + + + +
    + Next non-vulnerable version + + {% if next_non_vulnerable.version %} + {{ next_non_vulnerable.version }} + {% else %} + None. + {% endif %} +
    + Latest non-vulnerable version + + {% if latest_non_vulnerable.version %} + {{ latest_non_vulnerable.version }} + {% else %} + None. + {% endif %} +
    + Risk score + + {{package.risk_score}} +
    +
    + + {% endif %} + +
    + {% if affected_by_advisories_v2|length != 0 %} +
    + Vulnerabilities affecting this package ({{ affected_by_advisories_v2|length }}) +
    + + + + + + + + + + + + + {% for advisory in affected_by_advisories_v2 %} + + + + + + + + {% empty %} + + + + {% endfor %} + +
    AdvisorySourceDate PublishedSummaryFixed in package version
    + + {{advisory.primary_advisory.advisory_id }} + +
    + {% if advisory.identifiers|length != 0 %} + Aliases: + {% endif %} +
    + {% for alias in advisory.identifiers %} + {% if alias.url %} + {{ alias }} +
    + {% else %} + {{ alias }} +
    + {% endif %} + {% endfor %} +
    + {% if advisory.secondary_members|length != 0 %} +

    Supporting advisories are listed below the primary advisory.

    + {% for secondary in advisory.secondary_members %} + + {{secondary.advisory.avid }}
    +
    + {% endfor %} + {% endif %} +
    + {{advisory.primary_advisory.url}} + + {{advisory.primary_advisory.date_published}} + + {{ advisory.primary_advisory.summary }} + + {% with fixed=fixed_package_details|get_item:advisory.primary_advisory.avid %} + {% if fixed %} + {% for item in fixed %} +
    + {{ item.pkg.version }} +
    + {% if item.pkg.is_vulnerable %} + + Vulnerable + + {% else %} + + Not vulnerable + + {% endif %} +
    + {% endfor %} + {% else %} + There are no reported fixed by versions. + {% endif %} + {% endwith %} +
    + This package is not known to be subject of any advisories. +
    + {% elif affected_by_advisories_v2_url %} +
    + This package is subject to more than 100 advisories. Please refer to the following + URL for vulnerabilities affecting this package: Advisories +
    + {% else %} +
    + This package is not known to be subject of any advisories. +
    + {% endif %} +
    + +
    + {% if fixing_advisories_v2|length != 0 %} +
    + Vulnerabilities fixed by this package ({{ fixing_advisories_v2|length }}) +
    + + + + + + + + + + + + + {% for advisory in fixing_advisories_v2 %} + + + + + + + + {% empty %} + + + + {% endfor %} + +
    AdvisorySourceDate PublishedSummaryAliases
    + + {{advisory.primary_advisory.advisory_id }} + +
    + {% if advisory.secondary_members|length != 0 %} +

    Supporting advisories are listed below the primary advisory.

    + {% for secondary in advisory.secondary_members %} + + {{secondary.advisory.avid }}
    +
    + {% endfor %} + {% endif %} +
    + {{advisory.primary_advisory.url}} + + {{advisory.primary_advisory.date_published}} + + {{ advisory.primary_advisory.summary }} + + {% for alias in advisory.identifiers %} + {% if alias.url %} + {{ alias }} +
    + {% else %} + {{ alias }} +
    + {% endif %} + {% endfor %} +
    + This package is not known to fix any advisories. +
    + +
    + {% elif fixing_advisories_v2_url %} +
    + This package is known to fix more than 100 advisories. Please refer to the following + URL for vulnerabilities fixed by this package: Advisories +
    + {% else %} +
    + This package is not known to fix any advisories. +
    + {% endif %} +
    +
    +
    + + +
    +
    + + + +{% endif %} +{% endblock %} diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 856c10ce6..b9d172ca1 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -37,6 +37,7 @@ from vulnerabilities.forms import PackageSearchForm from vulnerabilities.forms import PipelineSchedulePackageForm from vulnerabilities.forms import VulnerabilitySearchForm +from vulnerabilities.models import AdvisorySetMember from vulnerabilities.models import ImpactedPackage from vulnerabilities.models import PipelineRun from vulnerabilities.models import PipelineSchedule @@ -292,6 +293,110 @@ def get_object(self, queryset=None): return package +class PackageV3Details(DetailView): + model = models.PackageV2 + template_name = "package_details_v3.html" + slug_url_kwarg = "purl" + slug_field = "purl" + + def get_context_data(self, **kwargs): + context = super().get_context_data(**kwargs) + package = self.object + + next_non_vulnerable, latest_non_vulnerable = package.get_non_vulnerable_versions() + + context["package"] = package + context["next_non_vulnerable"] = next_non_vulnerable + context["latest_non_vulnerable"] = latest_non_vulnerable + context["package_search_form"] = PackageSearchForm(self.request.GET) + + affected_by_advisories_qs = ( + models.AdvisorySet.objects.filter(package=package, relation_type="affecting") + .select_related("primary_advisory") + .prefetch_related( + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + "advisory" + ), + to_attr="secondary_members", + ) + ) + ) + + fixing_advisories_qs = ( + models.AdvisorySet.objects.filter(package=package, relation_type="fixing") + .select_related("primary_advisory") + .prefetch_related( + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + "advisory" + ), + to_attr="secondary_members", + ) + ) + ) + + print(affected_by_advisories_qs) + print(fixing_advisories_qs) + + 
affected_by_advisories_url = None + fixing_advisories_url = None + + affected_by_advisories_qs_ids = affected_by_advisories_qs.only("id") + fixing_advisories_qs_ids = fixing_advisories_qs.only("id") + + # affected_by_advisories = list(affected_by_advisories_qs_ids[:101]) + # if len(affected_by_advisories) > 100: + # affected_by_advisories_url = reverse_lazy( + # "affected_by_advisories_v2", kwargs={"purl": package.package_url} + # ) + # context["affected_by_advisories_v2_url"] = affected_by_advisories_url + # context["affected_by_advisories_v2"] = [] + # context["fixed_package_details"] = {} + + # else: + fixed_pkg_details = get_fixed_package_details(package) + + context["affected_by_advisories_v2"] = affected_by_advisories_qs + context["fixed_package_details"] = fixed_pkg_details + context["affected_by_advisories_v2_url"] = None + + # fixing_advisories = list(fixing_advisories_qs_ids[:101]) + # if len(fixing_advisories) > 100: + # fixing_advisories_url = reverse_lazy( + # "fixing_advisories_v2", kwargs={"purl": package.package_url} + # ) + # context["fixing_advisories_v2_url"] = fixing_advisories_url + # context["fixing_advisories_v2"] = [] + + # else: + context["fixing_advisories_v2"] = fixing_advisories_qs + context["fixing_advisories_v2_url"] = None + + return context + + def get_object(self, queryset=None): + if queryset is None: + queryset = self.get_queryset() + + purl = self.kwargs.get(self.slug_url_kwarg) + if purl: + queryset = queryset.for_purl(purl) + else: + cls = self.__class__.__name__ + raise AttributeError( + f"Package details view {cls} must be called with a purl, " f"but got: {purl!r}" + ) + + try: + package = queryset.get() + except queryset.model.DoesNotExist: + raise Http404(f"No Package found for purl: {purl}") + return package + + def get_fixed_package_details(package): rows = package.affected_in_impacts.values_list( "advisory__avid", diff --git a/vulnerablecode/urls.py b/vulnerablecode/urls.py index eb1bc006b..efbfc9c6f 100644 --- 
a/vulnerablecode/urls.py +++ b/vulnerablecode/urls.py @@ -29,7 +29,7 @@ from vulnerabilities.api_v3 import AffectedByAdvisoriesViewSet from vulnerabilities.api_v3 import FixingAdvisoriesViewSet from vulnerabilities.api_v3 import PackageV3ViewSet -from vulnerabilities.views import AdminLoginView +from vulnerabilities.views import AdminLoginView, PackageV3Details from vulnerabilities.views import AdvisoryDetails from vulnerabilities.views import AdvisoryPackagesDetails from vulnerabilities.views import AffectedByAdvisoriesListView @@ -141,7 +141,7 @@ def __init__(self, *args, **kwargs): ), re_path( r"^packages/v2/(?Ppkg:.+)$", - PackageV2Details.as_view(), + PackageV3Details.as_view(), name="package_details_v2", ), re_path( From 59fd85ff1bc14535e206d941fb5eb804e360ebbf Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 27 Mar 2026 01:47:24 +0530 Subject: [PATCH 478/545] Fix content hash logic Signed-off-by: Tushar Goel --- vulnerabilities/utils.py | 5 +---- vulnerablecode/urls.py | 3 ++- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/vulnerabilities/utils.py b/vulnerabilities/utils.py index f90d42401..88adf1c41 100644 --- a/vulnerabilities/utils.py +++ b/vulnerabilities/utils.py @@ -611,6 +611,7 @@ def normalize_text(text): def normalize_list(lst): """Sort a list to ensure consistent ordering.""" + lst = [x for x in lst if x] return sorted(lst) if lst else [] @@ -885,13 +886,9 @@ def compute_advisory_content(advisory_data): if isinstance(advisory_data, AdvisoryV2): advisory_data = advisory_data.to_advisory_data() normalized_data = { - "summary": normalize_text(advisory_data.summary), "affected_packages": [ pkg.to_dict() for pkg in normalize_list(advisory_data.affected_packages) if pkg ], - "severities": [sev.to_dict() for sev in normalize_list(advisory_data.severities) if sev], - "weaknesses": normalize_list(advisory_data.weaknesses), - "patches": [patch.to_dict() for patch in normalize_list(advisory_data.patches)], } normalized_json = 
json.dumps(normalized_data, separators=(",", ":"), sort_keys=True) diff --git a/vulnerablecode/urls.py b/vulnerablecode/urls.py index efbfc9c6f..745d2a469 100644 --- a/vulnerablecode/urls.py +++ b/vulnerablecode/urls.py @@ -29,7 +29,7 @@ from vulnerabilities.api_v3 import AffectedByAdvisoriesViewSet from vulnerabilities.api_v3 import FixingAdvisoriesViewSet from vulnerabilities.api_v3 import PackageV3ViewSet -from vulnerabilities.views import AdminLoginView, PackageV3Details +from vulnerabilities.views import AdminLoginView from vulnerabilities.views import AdvisoryDetails from vulnerabilities.views import AdvisoryPackagesDetails from vulnerabilities.views import AffectedByAdvisoriesListView @@ -41,6 +41,7 @@ from vulnerabilities.views import PackageSearch from vulnerabilities.views import PackageSearchV2 from vulnerabilities.views import PackageV2Details +from vulnerabilities.views import PackageV3Details from vulnerabilities.views import PipelineRunDetailView from vulnerabilities.views import PipelineRunListView from vulnerabilities.views import PipelineScheduleListView From f562fd85c8440aab8229fcec73774f83c82deb09 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 27 Mar 2026 01:48:13 +0530 Subject: [PATCH 479/545] Test out small use case Signed-off-by: Tushar Goel --- .../pipelines/v2_improvers/group_advisories_for_packages.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index 244de770e..386ce63af 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -118,7 +118,7 @@ def get_merged_identifier_groups(advisories): def group_advisoris_for_packages(logger=None): - for package in PackageV2.objects.iterator(): + for package in 
PackageV2.objects.filter(package_url="pkg:pypi/django@1.5.2").iterator(): affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( purl=package.purl ).prefetch_related("aliases") From 931e111e8eed66a23894c203406c31693a035708 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 27 Mar 2026 02:09:16 +0530 Subject: [PATCH 480/545] Group for all packages Signed-off-by: Tushar Goel --- .../pipelines/v2_improvers/group_advisories_for_packages.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index 386ce63af..f26211f10 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -36,11 +36,13 @@ def merge_advisories(advisories): advisories = list(advisories) + print(len(advisories)) + content_hash_map = defaultdict(list) result_groups = [] for adv in advisories: - + print(adv.avid) if adv.advisory_content_hash: content_hash_map[adv.advisory_content_hash].append(adv) else: @@ -118,7 +120,7 @@ def get_merged_identifier_groups(advisories): def group_advisoris_for_packages(logger=None): - for package in PackageV2.objects.filter(package_url="pkg:pypi/django@1.5.2").iterator(): + for package in PackageV2.objects.iterator(): affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( purl=package.purl ).prefetch_related("aliases") From 3286f90cb8214544dcb7609bbf1b4e418c7a50ad Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 27 Mar 2026 02:53:28 +0530 Subject: [PATCH 481/545] Change process to compute hash Signed-off-by: Tushar Goel --- .../group_advisories_for_packages.py | 31 +++++++++++++------ 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py 
b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index f26211f10..52d16c093 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -7,6 +7,8 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +import hashlib +import json from collections import defaultdict from django.db import transaction @@ -16,7 +18,7 @@ from vulnerabilities.models import AdvisoryV2 from vulnerabilities.models import PackageV2 from vulnerabilities.pipelines import VulnerableCodePipeline -from vulnerabilities.utils import compute_advisory_content +from vulnerabilities.utils import normalize_list class GroupAdvisoriesForPackages(VulnerableCodePipeline): @@ -42,15 +44,26 @@ def merge_advisories(advisories): result_groups = [] for adv in advisories: - print(adv.avid) - if adv.advisory_content_hash: - content_hash_map[adv.advisory_content_hash].append(adv) + affected = [] + fixed = [] + + for impact in adv.impacted_packages.all(): + affected.extend([pkg.package_url for pkg in impact.affecting_packages.all()]) + + fixed.extend([pkg.package_url for pkg in impact.fixed_by_packages.all()]) + + normalized_data = { + "affected_packages": normalize_list(affected), + "fixed_packages": normalize_list(fixed), + } + + normalized_json = json.dumps(normalized_data, separators=(",", ":"), sort_keys=True) + content_hash = hashlib.sha256(normalized_json.encode("utf-8")).hexdigest() + + if content_hash: + content_hash_map[content_hash].append(adv) else: - content_hash = compute_advisory_content(advisory_data=adv) - if content_hash: - content_hash_map[content_hash].append(adv) - else: - result_groups.append([adv]) + result_groups.append([adv]) final_groups = [] From 8266b254887d3e70e9eb391824e64bec4d6d2cd4 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 27 Mar 2026 03:01:05 +0530 Subject: [PATCH 482/545] Prefetch affected packages Signed-off-by: 
Tushar Goel --- .../group_advisories_for_packages.py | 34 ++++++++++++------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index 52d16c093..0d466e44a 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -38,8 +38,6 @@ def merge_advisories(advisories): advisories = list(advisories) - print(len(advisories)) - content_hash_map = defaultdict(list) result_groups = [] @@ -77,18 +75,15 @@ def merge_advisories(advisories): def get_merged_identifier_groups(advisories): identifier_groups = defaultdict(set) - advisory_to_identifiers = defaultdict(set) advisories = list(advisories) for adv in advisories: identifier_groups[adv.advisory_id].add(adv) - advisory_to_identifiers[adv].add(adv.advisory_id) - for alias in adv.aliases.all(): - identifier_groups[alias.alias].add(adv) - advisory_to_identifiers[adv].add(alias.alias) + for alias in adv.aliases.values_list("alias", flat=True): + identifier_groups[alias].add(adv) groups = [set(advs) for advs in identifier_groups.values() if len(advs) > 1] @@ -134,13 +129,26 @@ def get_merged_identifier_groups(advisories): def group_advisoris_for_packages(logger=None): for package in PackageV2.objects.iterator(): - affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( - purl=package.purl - ).prefetch_related("aliases") + print(package) + affecting_advisories = ( + AdvisoryV2.objects + .latest_affecting_advisories_for_purl(purl=package.purl) + .prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", + ) + ) - fixed_by_advisories = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl( - purl=package.purl - ).prefetch_related("aliases") + fixed_by_advisories = ( + AdvisoryV2.objects + 
.latest_fixed_by_advisories_for_purl(purl=package.purl) + .prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", + ) + ) try: delete_and_save_advisory_set(package, affecting_advisories, relation="affecting") From bf29369b4c4e67b8e31648f41be78db0ca9d6463 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 27 Mar 2026 03:22:05 +0530 Subject: [PATCH 483/545] Cache the advisory content hash Signed-off-by: Tushar Goel --- .../group_advisories_for_packages.py | 76 ++++++++++--------- 1 file changed, 40 insertions(+), 36 deletions(-) diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index 0d466e44a..87b05eb5f 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -34,34 +34,23 @@ def group_advisories_for_packages(self): group_advisoris_for_packages(logger=self.log) +CONTENT_HASH_CACHE = {} + + def merge_advisories(advisories): advisories = list(advisories) content_hash_map = defaultdict(list) - result_groups = [] for adv in advisories: - affected = [] - fixed = [] - - for impact in adv.impacted_packages.all(): - affected.extend([pkg.package_url for pkg in impact.affecting_packages.all()]) - - fixed.extend([pkg.package_url for pkg in impact.fixed_by_packages.all()]) - - normalized_data = { - "affected_packages": normalize_list(affected), - "fixed_packages": normalize_list(fixed), - } - - normalized_json = json.dumps(normalized_data, separators=(",", ":"), sort_keys=True) - content_hash = hashlib.sha256(normalized_json.encode("utf-8")).hexdigest() - - if content_hash: - content_hash_map[content_hash].append(adv) + if adv.avid in CONTENT_HASH_CACHE: + content_hash = CONTENT_HASH_CACHE[adv.avid] else: - result_groups.append([adv]) + content_hash = compute_advisory_content_hash(adv) + 
CONTENT_HASH_CACHE[adv.avid] = content_hash + + content_hash_map[content_hash].append(adv) final_groups = [] @@ -72,6 +61,25 @@ def merge_advisories(advisories): return final_groups +def compute_advisory_content_hash(adv): + affected = [] + fixed = [] + + for impact in adv.impacted_packages.all(): + affected.extend([pkg.package_url for pkg in impact.affecting_packages.all()]) + + fixed.extend([pkg.package_url for pkg in impact.fixed_by_packages.all()]) + + normalized_data = { + "affected_packages": normalize_list(affected), + "fixed_packages": normalize_list(fixed), + } + + normalized_json = json.dumps(normalized_data, separators=(",", ":"), sort_keys=True) + content_hash = hashlib.sha256(normalized_json.encode("utf-8")).hexdigest() + return content_hash + + def get_merged_identifier_groups(advisories): identifier_groups = defaultdict(set) @@ -130,24 +138,20 @@ def get_merged_identifier_groups(advisories): def group_advisoris_for_packages(logger=None): for package in PackageV2.objects.iterator(): print(package) - affecting_advisories = ( - AdvisoryV2.objects - .latest_affecting_advisories_for_purl(purl=package.purl) - .prefetch_related( - "aliases", - "impacted_packages__affecting_packages", - "impacted_packages__fixed_by_packages", - ) + affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( + purl=package.purl + ).prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", ) - fixed_by_advisories = ( - AdvisoryV2.objects - .latest_fixed_by_advisories_for_purl(purl=package.purl) - .prefetch_related( - "aliases", - "impacted_packages__affecting_packages", - "impacted_packages__fixed_by_packages", - ) + fixed_by_advisories = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl( + purl=package.purl + ).prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", ) try: From 680f45e920342325f5e3040c25aea135dd524890 Mon Sep 17 
00:00:00 2001 From: Tushar Goel Date: Fri, 27 Mar 2026 12:29:14 +0530 Subject: [PATCH 484/545] Group specific ecosystems Signed-off-by: Tushar Goel --- .../v2_improvers/group_advisories_for_packages.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index 87b05eb5f..99aa079f7 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -41,6 +41,9 @@ def merge_advisories(advisories): advisories = list(advisories) + if len(advisories) > 1000: + return + content_hash_map = defaultdict(list) for adv in advisories: @@ -136,7 +139,9 @@ def get_merged_identifier_groups(advisories): def group_advisoris_for_packages(logger=None): - for package in PackageV2.objects.iterator(): + for package in PackageV2.objects.filter( + type__in=["npm", "pypi", "nuget", "maven", "composer"] + ).iterator(): print(package) affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( purl=package.purl From b9c4f185abf7a3ce295b82a10f0ccea00f1c65a7 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 27 Mar 2026 12:32:46 +0530 Subject: [PATCH 485/545] Group specific ecosystems Signed-off-by: Tushar Goel --- .../pipelines/v2_improvers/group_advisories_for_packages.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index 99aa079f7..75b983e1c 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -41,9 +41,6 @@ def merge_advisories(advisories): advisories = list(advisories) - if len(advisories) > 1000: - return - content_hash_map = defaultdict(list) 
for adv in advisories: From 312d4444b0e98de04888d47e07c5a393960e224c Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Sat, 28 Mar 2026 01:00:19 +0530 Subject: [PATCH 486/545] Use V2 views Signed-off-by: Tushar Goel --- .../templates/package_details_v2.html | 52 ++------- vulnerabilities/utils.py | 100 ++++++++++++++++++ vulnerabilities/views.py | 67 ++++++++---- vulnerablecode/urls.py | 2 +- 4 files changed, 156 insertions(+), 65 deletions(-) diff --git a/vulnerabilities/templates/package_details_v2.html b/vulnerabilities/templates/package_details_v2.html index f90585b9d..06c15f0d0 100644 --- a/vulnerabilities/templates/package_details_v2.html +++ b/vulnerabilities/templates/package_details_v2.html @@ -136,8 +136,6 @@ Advisory - Source - Date Published Summary Fixed in package version @@ -147,15 +145,15 @@ {% for advisory in affected_by_advisories_v2 %} - - {{advisory.primary.avid }} + + {{advisory.identifier }}
    - {% if advisory.primary.alias|length != 0 %} + {% if advisory.aliases|length != 0 %} Aliases: {% endif %}
    - {% for alias in advisory.primary.alias %} + {% for alias in advisory.aliases %} {% if alias.url %} {{ alias }} @@ -166,26 +164,12 @@ {% endif %} {% endfor %} - {% if advisory.secondary|length != 0 %} -

    Supporting advisories are listed below the primary advisory.

    - {% for secondary in advisory.secondary %} - - {{secondary.avid }} - - {% endfor %} - {% endif %} - {{advisory.primary.url}} - - - {{advisory.primary.date_published}} - - - {{ advisory.primary.summary }} + {{ advisory.advisory.summary|truncatewords:20 }} - {% with fixed=fixed_package_details|get_item:advisory.primary.avid %} + {% with fixed=fixed_package_details|get_item:advisory.advisory.avid %} {% if fixed %} {% for item in fixed %}
    @@ -240,8 +224,6 @@ Advisory - Source - Date Published Summary Aliases @@ -250,30 +232,16 @@ {% for advisory in fixing_advisories_v2 %} - - {{advisory.primary.avid }} + + {{advisory.identifier }}
    - {% if advisory.secondary|length != 0 %} -

    Supporting advisories are listed below the primary advisory.

    - {% for secondary in advisory.secondary %} - - {{secondary.avid }} - - {% endfor %} - {% endif %} - - - {{advisory.primary.url}} - - - {{advisory.primary.date_published}} - {{ advisory.primary.summary }} + {{ advisory.advisory.summary|truncatewords:20 }} - {% for alias in advisory.primary.alias %} + {% for alias in advisory.aliases %} {% if alias.url %} {{ alias }} diff --git a/vulnerabilities/utils.py b/vulnerabilities/utils.py index 88adf1c41..2dd606a92 100644 --- a/vulnerabilities/utils.py +++ b/vulnerabilities/utils.py @@ -895,3 +895,103 @@ def compute_advisory_content(advisory_data): content_hash = hashlib.sha256(normalized_json.encode("utf-8")).hexdigest() return content_hash + + +def merge_advisories(advisories, package): + + advisories = list(advisories) + + content_hash_map = defaultdict(list) + + for adv in advisories: + content_hash = compute_advisory_content_hash(adv, package) + content_hash_map[content_hash].append(adv) + + final_groups = [] + + for group in content_hash_map.values(): + groups = get_merged_identifier_groups(group) + final_groups.extend(groups) + + return final_groups + + +def compute_advisory_content_hash(adv, package): + affected = [] + fixed = [] + + version_less_purl = PackageURL( + type=package.type, + namespace=package.namespace, + name=package.name, + qualifiers=package.qualifiers, + subpath=package.subpath, + ) + + for impact in adv.impacted_packages.filter(base_purl=str(version_less_purl)): + affected.extend([pkg.package_url for pkg in impact.affecting_packages.all()]) + fixed.extend([pkg.package_url for pkg in impact.fixed_by_packages.all()]) + + normalized_data = { + "affected_packages": normalize_list(affected), + "fixed_packages": normalize_list(fixed), + } + + normalized_json = json.dumps(normalized_data, separators=(",", ":"), sort_keys=True) + content_hash = hashlib.sha256(normalized_json.encode("utf-8")).hexdigest() + return content_hash + + +def get_merged_identifier_groups(advisories): + + identifier_groups = 
defaultdict(set) + + advisories = list(advisories) + + for adv in advisories: + + identifier_groups[adv.advisory_id].add(adv) + + for alias in adv.aliases.values_list("alias", flat=True): + identifier_groups[alias].add(adv) + + groups = [set(advs) for advs in identifier_groups.values() if len(advs) > 1] + + merged = [] + + for group in groups: + group = set(group) + + i = 0 + while i < len(merged): + if group & merged[i]: + group |= merged[i] + merged.pop(i) + else: + i += 1 + + merged.append(group) + + all_grouped = set() + for g in merged: + all_grouped |= g + + for adv in advisories: + if adv not in all_grouped: + merged.append({adv}) + + final_groups = [] + + for group in merged: + identifiers = set() + for adv in group: + for alias in adv.aliases.all(): + identifiers.add(alias) + + primary = max(group, key=lambda a: a.precedence if a.precedence is not None else -1) + + secondary = [a for a in group if a != primary] + + final_groups.append((identifiers, primary, secondary)) + + return final_groups diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index b9d172ca1..c8bfc6634 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -38,6 +38,7 @@ from vulnerabilities.forms import PipelineSchedulePackageForm from vulnerabilities.forms import VulnerabilitySearchForm from vulnerabilities.models import AdvisorySetMember +from vulnerabilities.models import AdvisoryV2 from vulnerabilities.models import ImpactedPackage from vulnerabilities.models import PipelineRun from vulnerabilities.models import PipelineSchedule @@ -45,6 +46,7 @@ from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS from vulnerabilities.utils import group_advisories_by_content +from vulnerabilities.utils import merge_advisories from vulnerablecode import __version__ as VULNERABLECODE_VERSION from vulnerablecode.settings import env @@ -218,22 +220,30 @@ def get_context_data(self, **kwargs): 
context["latest_non_vulnerable"] = latest_non_vulnerable context["package_search_form"] = PackageSearchForm(self.request.GET) - affected_by_advisories_qs = models.AdvisoryV2.objects.latest_affecting_advisories_for_purl( - package.package_url + affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( + purl=package.purl + ).prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", ) - fixing_advisories_qs = models.AdvisoryV2.objects.latest_fixed_by_advisories_for_purl( - package.package_url + fixed_by_advisories = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl( + purl=package.purl + ).prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", ) affected_by_advisories_url = None fixing_advisories_url = None - affected_by_advisories_qs_ids = affected_by_advisories_qs.only("id") - fixing_advisories_qs_ids = fixing_advisories_qs.only("id") + affected_by_advisories_qs_ids = affecting_advisories.only("id") + fixing_advisories_qs_ids = fixed_by_advisories.only("id") - affected_by_advisories = list(affected_by_advisories_qs_ids[:101]) - if len(affected_by_advisories) > 100: + affected_by_advisories = list(affected_by_advisories_qs_ids[:1001]) + if len(affected_by_advisories) > 1001: affected_by_advisories_url = reverse_lazy( "affected_by_advisories_v2", kwargs={"purl": package.package_url} ) @@ -242,19 +252,25 @@ def get_context_data(self, **kwargs): context["fixed_package_details"] = {} else: + advisories = [] + fixed_pkg_details = get_fixed_package_details(package) - affected_avid_by_hash = {} - affected_avid_by_hash = group_advisories_by_content(affected_by_advisories_qs) - affecting_advs = [] + groups = merge_advisories(affecting_advisories, package) + for aliases, primary, _ in groups: + identifier = primary.advisory_id.split("/")[-1] + + filtered_aliases = [alias for alias in aliases if alias.alias != identifier] + + 
advisories.append( + {"aliases": filtered_aliases, "advisory": primary, "identifier": identifier} + ) - for hash in affected_avid_by_hash: - affecting_advs.append(affected_avid_by_hash[hash]) - context["affected_by_advisories_v2"] = affecting_advs + context["affected_by_advisories_v2"] = advisories context["fixed_package_details"] = fixed_pkg_details context["affected_by_advisories_v2_url"] = None - fixing_advisories = list(fixing_advisories_qs_ids[:101]) - if len(fixing_advisories) > 100: + fixing_advisories = list(fixing_advisories_qs_ids[:1001]) + if len(fixing_advisories) > 1001: fixing_advisories_url = reverse_lazy( "fixing_advisories_v2", kwargs={"purl": package.package_url} ) @@ -262,13 +278,20 @@ def get_context_data(self, **kwargs): context["fixing_advisories_v2"] = [] else: - fixing_avid_by_hash = {} - fixing_avid_by_hash = group_advisories_by_content(fixing_advisories_qs) - fixing_advs = [] + advisories = [] + + fixed_pkg_details = get_fixed_package_details(package) + groups = merge_advisories(fixing_advisories, package) + for aliases, primary, _ in groups: + identifier = primary.advisory_id.split("/")[-1] + + filtered_aliases = [alias for alias in aliases if alias.alias != identifier] + + advisories.append( + {"aliases": filtered_aliases, "advisory": primary, "identifier": identifier} + ) - for hash in fixing_avid_by_hash: - fixing_advs.append(fixing_avid_by_hash[hash]) - context["fixing_advisories_v2"] = fixing_advs + context["fixing_advisories_v2"] = advisories context["fixing_advisories_v2_url"] = None return context diff --git a/vulnerablecode/urls.py b/vulnerablecode/urls.py index 745d2a469..44cacd9b0 100644 --- a/vulnerablecode/urls.py +++ b/vulnerablecode/urls.py @@ -142,7 +142,7 @@ def __init__(self, *args, **kwargs): ), re_path( r"^packages/v2/(?Ppkg:.+)$", - PackageV3Details.as_view(), + PackageV2Details.as_view(), name="package_details_v2", ), re_path( From 0b753c9cf35bab2450a8502c1ffc89d710b5f047 Mon Sep 17 00:00:00 2001 From: Tushar Goel 
Date: Mon, 30 Mar 2026 16:32:04 +0530 Subject: [PATCH 487/545] Adjust API and UI for new grouping Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 115 +++++++++-- vulnerabilities/improvers/__init__.py | 2 - ...remove_advisoryset_identifiers_and_more.py | 30 +++ vulnerabilities/models.py | 15 +- .../v2_importers/github_osv_importer.py | 2 +- .../pipelines/v2_importers/pypa_importer.py | 2 +- .../pipelines/v2_importers/pysec_importer.py | 2 +- .../compute_advisory_content_hash.py | 65 ------ .../group_advisories_for_packages.py | 162 +-------------- vulnerabilities/pipes/advisory.py | 3 - vulnerabilities/pipes/group_advisories.py | 50 +++++ .../templates/package_details_v2.html | 102 +++++++++- .../test_compute_advisory_content_hash.py | 88 -------- vulnerabilities/tests/test_advisory_merge.py | 192 ++++++++++++++++++ vulnerabilities/tests/test_api_v3.py | 21 +- vulnerabilities/utils.py | 104 +++++----- vulnerabilities/views.py | 126 +++++++----- 17 files changed, 620 insertions(+), 461 deletions(-) create mode 100644 vulnerabilities/migrations/0119_remove_advisoryset_identifiers_and_more.py delete mode 100644 vulnerabilities/pipelines/v2_improvers/compute_advisory_content_hash.py create mode 100644 vulnerabilities/pipes/group_advisories.py delete mode 100644 vulnerabilities/tests/pipelines/v2_improvers/test_compute_advisory_content_hash.py create mode 100644 vulnerabilities/tests/test_advisory_merge.py diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index b55822e41..ea82dcce3 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -20,13 +20,16 @@ from rest_framework.throttling import AnonRateThrottle from vulnerabilities.models import AdvisoryReference +from vulnerabilities.models import AdvisorySet from vulnerabilities.models import AdvisorySeverity from vulnerabilities.models import AdvisoryV2 from vulnerabilities.models import AdvisoryWeakness from vulnerabilities.models import ImpactedPackageAffecting from 
vulnerabilities.models import PackageV2 from vulnerabilities.throttling import PermissionBasedUserRateThrottle -from vulnerabilities.utils import group_advisories_by_content +from vulnerabilities.utils import TYPES_WITH_MULTIPLE_IMPORTERS +from vulnerabilities.utils import get_advisories_from_groups +from vulnerabilities.utils import merge_and_save_grouped_advisories class PackageQuerySerializer(serializers.Serializer): @@ -210,6 +213,32 @@ def get_affected_by_vulnerabilities(self, package): """Return a dictionary with advisory as keys and their details, including fixed_by_packages.""" advisories_qs = AdvisoryV2.objects.latest_affecting_advisories_for_purl(package.package_url) + advisories = [] + + is_grouped = AdvisorySet.objects.filter(package=package, relation_type="affecting").exists() + + if is_grouped: + affected_by_advisories_qs = AdvisorySet.objects.filter( + package=package, relation_type="affecting" + ).select_related("primary_advisory") + + affected_groups = [ + (list(adv.aliases.all()), adv.primary_advisory, "") + for adv in affected_by_advisories_qs + ] + + advisories = get_advisories_from_groups(affected_groups) + return self.return_advisories_data(package, advisories_qs, advisories) + + if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: + advisories_qs = advisories_qs.prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", + ) + advisories = merge_and_save_grouped_advisories(package, advisories_qs, "affecting") + return self.return_advisories_data(package, advisories_qs, advisories) + advisories_ids = advisories_qs.only("id") advisories_ids = list(advisories_ids[:101]) @@ -227,20 +256,19 @@ def get_affected_by_vulnerabilities(self, package): impact_by_avid = {impact.advisory.avid: impact for impact in impacts} - grouped = group_advisories_by_content(advisories_qs) - result = [] - for entry in grouped.values(): - primary = entry["primary"] - impact = impact_by_avid.get(primary.avid) + + for 
advisory in advisories_qs: + impact = impact_by_avid.get(advisory.avid) if not impact: continue result.append( { - "advisory_id": primary.avid, + "advisory_id": advisory.advisory_id.split("/")[-1], + "aliases": [alias.alias for alias in advisory.aliases.all()], + "summary": advisory.summary, "fixed_by_packages": [pkg.purl for pkg in impact.fixed_by_packages.all()], - "duplicate_advisory_ids": [a.avid for a in entry["secondary"]], } ) @@ -249,21 +277,82 @@ def get_affected_by_vulnerabilities(self, package): def get_fixing_vulnerabilities(self, package): advisories_qs = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl(package.package_url) + advisories = [] + + is_grouped = AdvisorySet.objects.filter(package=package, relation_type="fixing").exists() + + if is_grouped: + fixing_advisories_qs = AdvisorySet.objects.filter( + package=package, relation_type="fixing" + ).select_related("primary_advisory") + + fixing_groups = [ + (list(adv.aliases.all()), adv.primary_advisory, "") for adv in fixing_advisories_qs + ] + + advisories = get_advisories_from_groups(fixing_groups) + return self.return_fixing_advisories_data(advisories) + + if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: + advisories_qs = advisories_qs.prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", + ) + advisories = merge_and_save_grouped_advisories(package, advisories_qs, "fixing") + return self.return_fixing_advisories_data(advisories) + advisories_ids = advisories_qs.only("id") advisories_ids = list(advisories_ids[:101]) if len(advisories_ids) > 100: return None - grouped = group_advisories_by_content(advisories_qs) + results = [] + for advisory in advisories_qs: + results.append( + { + "advisory_id": advisory.advisory_id.split("/")[-1], + } + ) + return results + + def return_fixing_advisories_data(self, advisories): result = [] - for entry in grouped.values(): - primary = entry["primary"] + for advisory in advisories: result.append( { 
- "advisory_id": primary.avid, - "duplicate_advisory_ids": [a.avid for a in entry["secondary"]], + "advisory_id": advisory["identifier"], + } + ) + + return result + + def return_advisories_data(self, package, advisories_qs, advisories): + advisory_by_avid = {adv.avid: adv for adv in advisories_qs} + avids = advisory_by_avid.keys() + + impacts = ( + package.affected_in_impacts.filter(advisory__avid__in=avids) + .select_related("advisory") + .prefetch_related("fixed_by_packages") + ) + + impact_by_avid = {impact.advisory.avid: impact for impact in impacts} + + result = [] + for advisory in advisories: + impact = impact_by_avid.get(advisory["advisory"].avid) + if not impact: + continue + + result.append( + { + "advisory_id": advisory["identifier"], + "aliases": [alias.alias for alias in advisory["aliases"]], + "summary": advisory["advisory"].summary, + "fixed_by_packages": [pkg.purl for pkg in impact.fixed_by_packages.all()], } ) diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index 61a1fc882..3e991d658 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -20,7 +20,6 @@ from vulnerabilities.pipelines import populate_vulnerability_summary_pipeline from vulnerabilities.pipelines import remove_duplicate_advisories from vulnerabilities.pipelines.v2_improvers import collect_ssvc_trees -from vulnerabilities.pipelines.v2_improvers import compute_advisory_content_hash from vulnerabilities.pipelines.v2_improvers import compute_advisory_todo as compute_advisory_todo_v2 from vulnerabilities.pipelines.v2_improvers import compute_package_risk as compute_package_risk_v2 from vulnerabilities.pipelines.v2_improvers import ( @@ -76,7 +75,6 @@ compute_advisory_todo.ComputeToDo, collect_ssvc_trees.CollectSSVCPipeline, relate_severities.RelateSeveritiesPipeline, - compute_advisory_content_hash.ComputeAdvisoryContentHash, group_advisories_for_packages.GroupAdvisoriesForPackages, ] ) diff --git 
a/vulnerabilities/migrations/0119_remove_advisoryset_identifiers_and_more.py b/vulnerabilities/migrations/0119_remove_advisoryset_identifiers_and_more.py new file mode 100644 index 000000000..503e14f8d --- /dev/null +++ b/vulnerabilities/migrations/0119_remove_advisoryset_identifiers_and_more.py @@ -0,0 +1,30 @@ +# Generated by Django 5.2.11 on 2026-03-30 08:35 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0118_advisoryset_advisorysetmember"), + ] + + operations = [ + migrations.RemoveField( + model_name="advisoryset", + name="identifiers", + ), + migrations.RemoveField( + model_name="advisoryv2", + name="advisory_content_hash", + ), + migrations.AddField( + model_name="advisoryset", + name="aliases", + field=models.ManyToManyField( + help_text="A list of serializable Alias objects", + related_name="advisory_sets", + to="vulnerabilities.advisoryalias", + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 05bf86a17..f51a92dbd 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -2949,7 +2949,11 @@ class AdvisorySet(models.Model): package = models.ForeignKey("PackageV2", on_delete=models.CASCADE) relation_type = models.CharField(max_length=20, choices=RELATION_TYPE_CHOICES) - identifiers = models.JSONField() + aliases = models.ManyToManyField( + AdvisoryAlias, + related_name="advisory_sets", + help_text="A list of serializable Alias objects", + ) primary_advisory = models.ForeignKey("AdvisoryV2", on_delete=models.PROTECT) @@ -3101,13 +3105,6 @@ class AdvisoryV2(models.Model): help_text="Related advisories that are used to calculate the severity of this advisory.", ) - advisory_content_hash = models.CharField( - max_length=64, - blank=True, - null=True, - help_text="A unique hash computed from the content of the advisory used to identify advisories with the same content.", - ) - risk_score = models.DecimalField( null=True, 
blank=True, @@ -3311,7 +3308,7 @@ def search(self, query: str = None): except ValueError: # otherwise use query as a plain string qs = qs.filter(package_url__icontains=query) - return qs.order_by("package_url") + return qs.order_by("package_url").order_by("-version_rank") def with_vulnerability_counts(self): return self.annotate( diff --git a/vulnerabilities/pipelines/v2_importers/github_osv_importer.py b/vulnerabilities/pipelines/v2_importers/github_osv_importer.py index cfe92d93f..33acaf7f8 100644 --- a/vulnerabilities/pipelines/v2_importers/github_osv_importer.py +++ b/vulnerabilities/pipelines/v2_importers/github_osv_importer.py @@ -31,7 +31,7 @@ class GithubOSVImporterPipeline(VulnerableCodeBaseImporterPipelineV2): license_url = "https://github.com/github/advisory-database/blob/main/LICENSE.md" repo_url = "git+https://github.com/github/advisory-database/" - precedence = 100 + precedence = 200 @classmethod def steps(cls): diff --git a/vulnerabilities/pipelines/v2_importers/pypa_importer.py b/vulnerabilities/pipelines/v2_importers/pypa_importer.py index 90599e99d..7a80ed70f 100644 --- a/vulnerabilities/pipelines/v2_importers/pypa_importer.py +++ b/vulnerabilities/pipelines/v2_importers/pypa_importer.py @@ -29,7 +29,7 @@ class PyPaImporterPipeline(VulnerableCodeBaseImporterPipelineV2): spdx_license_expression = "CC-BY-4.0" license_url = "https://github.com/pypa/advisory-database/blob/main/LICENSE" repo_url = "git+https://github.com/pypa/advisory-database" - precedence = 200 + precedence = 500 @classmethod def steps(cls): diff --git a/vulnerabilities/pipelines/v2_importers/pysec_importer.py b/vulnerabilities/pipelines/v2_importers/pysec_importer.py index 05614b961..e9225a4f5 100644 --- a/vulnerabilities/pipelines/v2_importers/pysec_importer.py +++ b/vulnerabilities/pipelines/v2_importers/pysec_importer.py @@ -29,7 +29,7 @@ class PyPIImporterPipeline(VulnerableCodeBaseImporterPipelineV2): license_url = 
"https://github.com/pypa/advisory-database/blob/main/LICENSE" url = "https://osv-vulnerabilities.storage.googleapis.com/PyPI/all.zip" spdx_license_expression = "CC-BY-4.0" - precedence = 100 + precedence = 300 @classmethod def steps(cls): diff --git a/vulnerabilities/pipelines/v2_improvers/compute_advisory_content_hash.py b/vulnerabilities/pipelines/v2_improvers/compute_advisory_content_hash.py deleted file mode 100644 index 8b285d361..000000000 --- a/vulnerabilities/pipelines/v2_improvers/compute_advisory_content_hash.py +++ /dev/null @@ -1,65 +0,0 @@ -# -# Copyright (c) nexB Inc. and others. All rights reserved. -# VulnerableCode is a trademark of nexB Inc. -# SPDX-License-Identifier: Apache-2.0 -# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/aboutcode-org/vulnerablecode for support or download. -# See https://aboutcode.org for more information about nexB OSS projects. -# - - -from aboutcode.pipeline import LoopProgress - -from vulnerabilities.models import AdvisoryV2 -from vulnerabilities.pipelines import VulnerableCodePipeline -from vulnerabilities.utils import compute_advisory_content - - -class ComputeAdvisoryContentHash(VulnerableCodePipeline): - """Compute Advisory Content Hash for Advisory.""" - - pipeline_id = "compute_advisory_content_hash_v2" - - @classmethod - def steps(cls): - return (cls.compute_advisory_content_hash,) - - def compute_advisory_content_hash(self): - """Compute Advisory Content Hash for Advisory.""" - - advisories = AdvisoryV2.objects.latest_per_avid().filter(advisory_content_hash__isnull=True) - - advisories_count = advisories.count() - - progress = LoopProgress( - total_iterations=advisories_count, - logger=self.log, - progress_step=1, - ) - - to_update = [] - batch_size = 5000 - - for advisory in progress.iter(advisories.iterator(chunk_size=batch_size)): - try: - advisory.advisory_content_hash = compute_advisory_content(advisory) - 
to_update.append(advisory) - except Exception as e: - self.log(f"Error computing advisory_content_hash for {advisory.avid}: {e}") - - if len(to_update) >= batch_size: - AdvisoryV2.objects.bulk_update( - to_update, - ["advisory_content_hash"], - batch_size=batch_size, - ) - to_update.clear() - - if to_update: - AdvisoryV2.objects.bulk_update( - to_update, - ["advisory_content_hash"], - batch_size=batch_size, - ) - - self.log("Finished computing advisory_content_hash") diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index 75b983e1c..d2c8f6296 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -7,18 +7,12 @@ # See https://aboutcode.org for more information about nexB OSS projects. # -import hashlib -import json -from collections import defaultdict - -from django.db import transaction - -from vulnerabilities.models import AdvisorySet -from vulnerabilities.models import AdvisorySetMember from vulnerabilities.models import AdvisoryV2 from vulnerabilities.models import PackageV2 from vulnerabilities.pipelines import VulnerableCodePipeline -from vulnerabilities.utils import normalize_list +from vulnerabilities.pipes.group_advisories import delete_and_save_advisory_set +from vulnerabilities.utils import TYPES_WITH_MULTIPLE_IMPORTERS +from vulnerabilities.utils import merge_advisories class GroupAdvisoriesForPackages(VulnerableCodePipeline): @@ -34,112 +28,9 @@ def group_advisories_for_packages(self): group_advisoris_for_packages(logger=self.log) -CONTENT_HASH_CACHE = {} - - -def merge_advisories(advisories): - - advisories = list(advisories) - - content_hash_map = defaultdict(list) - - for adv in advisories: - if adv.avid in CONTENT_HASH_CACHE: - content_hash = CONTENT_HASH_CACHE[adv.avid] - else: - content_hash = compute_advisory_content_hash(adv) - 
CONTENT_HASH_CACHE[adv.avid] = content_hash - - content_hash_map[content_hash].append(adv) - - final_groups = [] - - for group in content_hash_map.values(): - groups = get_merged_identifier_groups(group) - final_groups.extend(groups) - - return final_groups - - -def compute_advisory_content_hash(adv): - affected = [] - fixed = [] - - for impact in adv.impacted_packages.all(): - affected.extend([pkg.package_url for pkg in impact.affecting_packages.all()]) - - fixed.extend([pkg.package_url for pkg in impact.fixed_by_packages.all()]) - - normalized_data = { - "affected_packages": normalize_list(affected), - "fixed_packages": normalize_list(fixed), - } - - normalized_json = json.dumps(normalized_data, separators=(",", ":"), sort_keys=True) - content_hash = hashlib.sha256(normalized_json.encode("utf-8")).hexdigest() - return content_hash - - -def get_merged_identifier_groups(advisories): - - identifier_groups = defaultdict(set) - - advisories = list(advisories) - - for adv in advisories: - - identifier_groups[adv.advisory_id].add(adv) - - for alias in adv.aliases.values_list("alias", flat=True): - identifier_groups[alias].add(adv) - - groups = [set(advs) for advs in identifier_groups.values() if len(advs) > 1] - - merged = [] - - for group in groups: - group = set(group) - - i = 0 - while i < len(merged): - if group & merged[i]: - group |= merged[i] - merged.pop(i) - else: - i += 1 - - merged.append(group) - - all_grouped = set() - for g in merged: - all_grouped |= g - - for adv in advisories: - if adv not in all_grouped: - merged.append({adv}) - - final_groups = [] - - for group in merged: - identifiers = set() - for adv in group: - for alias in adv.aliases.values_list("alias", flat=True): - identifiers.add(alias) - - primary = max(group, key=lambda a: a.precedence if a.precedence is not None else -1) - - secondary = [a for a in group if a != primary] - - final_groups.append((identifiers, primary, secondary)) - - return final_groups - - def 
group_advisoris_for_packages(logger=None): - for package in PackageV2.objects.filter( - type__in=["npm", "pypi", "nuget", "maven", "composer"] - ).iterator(): - print(package) + for package in PackageV2.objects.filter(type__in=TYPES_WITH_MULTIPLE_IMPORTERS).iterator(): + print(f"Grouping advisories for package {package.purl}") affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( purl=package.purl ).prefetch_related( @@ -157,45 +48,10 @@ def group_advisoris_for_packages(logger=None): ) try: - delete_and_save_advisory_set(package, affecting_advisories, relation="affecting") - delete_and_save_advisory_set(package, fixed_by_advisories, relation="fixing") + affected_groups = merge_advisories(affecting_advisories, package) + fixed_by_groups = merge_advisories(fixed_by_advisories, package) + delete_and_save_advisory_set(affected_groups, package, relation="affecting") + delete_and_save_advisory_set(fixed_by_groups, package, relation="fixing") except Exception as e: print(f"Failed rebuilding advisory sets for package {package.purl}: {e!r}") continue - - -@transaction.atomic -def delete_and_save_advisory_set(package, advisories, relation=None): - AdvisorySet.objects.filter(package=package, relation_type=relation).delete() - - groups = merge_advisories(advisories) - - membership_to_create = [] - - for identifiers, primary, secondary in groups: - - advisory_set = AdvisorySet.objects.create( - package=package, - relation_type=relation, - identifiers=list(identifiers), - primary_advisory=primary, - ) - - membership_to_create.append( - AdvisorySetMember( - advisory_set=advisory_set, - advisory=primary, - is_primary=True, - ) - ) - - for adv in secondary: - membership_to_create.append( - AdvisorySetMember( - advisory_set=advisory_set, - advisory=adv, - is_primary=False, - ) - ) - - AdvisorySetMember.objects.bulk_create(membership_to_create) diff --git a/vulnerabilities/pipes/advisory.py b/vulnerabilities/pipes/advisory.py index a7f67153f..bcdd95075 
100644 --- a/vulnerabilities/pipes/advisory.py +++ b/vulnerabilities/pipes/advisory.py @@ -48,7 +48,6 @@ from vulnerabilities.models import VulnerabilitySeverity from vulnerabilities.models import Weakness from vulnerabilities.pipes.univers_utils import get_exact_purls_v2 -from vulnerabilities.utils import compute_advisory_content def get_or_create_aliases(aliases: List) -> QuerySet: @@ -302,7 +301,6 @@ def insert_advisory_v2( advisory_obj = None created = False content_id = compute_content_id_v2(advisory_data=advisory) - advisory_content_hash = compute_advisory_content(advisory_data=advisory) try: default_data = { "datasource_id": pipeline_id, @@ -313,7 +311,6 @@ def insert_advisory_v2( "original_advisory_text": advisory.original_advisory_text, "url": advisory.url, "precedence": precedence, - "advisory_content_hash": advisory_content_hash, } advisory_obj, created = AdvisoryV2.objects.get_or_create( diff --git a/vulnerabilities/pipes/group_advisories.py b/vulnerabilities/pipes/group_advisories.py new file mode 100644 index 000000000..d66365706 --- /dev/null +++ b/vulnerabilities/pipes/group_advisories.py @@ -0,0 +1,50 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
+# + +from django.db import transaction + + +@transaction.atomic +def delete_and_save_advisory_set(groups, package, relation=None): + from vulnerabilities.models import AdvisorySet + from vulnerabilities.models import AdvisorySetMember + + AdvisorySet.objects.filter(package=package, relation_type=relation).delete() + + membership_to_create = [] + + for identifiers, primary, secondary in groups: + + advisory_set = AdvisorySet.objects.create( + package=package, + relation_type=relation, + primary_advisory=primary, + ) + + advisory_set.aliases.add(*identifiers) + advisory_set.save() + + membership_to_create.append( + AdvisorySetMember( + advisory_set=advisory_set, + advisory=primary, + is_primary=True, + ) + ) + + for adv in secondary: + membership_to_create.append( + AdvisorySetMember( + advisory_set=advisory_set, + advisory=adv, + is_primary=False, + ) + ) + + AdvisorySetMember.objects.bulk_create(membership_to_create) diff --git a/vulnerabilities/templates/package_details_v2.html b/vulnerabilities/templates/package_details_v2.html index 06c15f0d0..8c3f62756 100644 --- a/vulnerabilities/templates/package_details_v2.html +++ b/vulnerabilities/templates/package_details_v2.html @@ -141,6 +141,7 @@ + {% if grouped %} {% for advisory in affected_by_advisories_v2 %} @@ -201,6 +202,68 @@ {% endfor %} + {% else %} + + {% for advisory in affected_by_advisories_v2 %} + + + + {{advisory.advisory_id }} + +
    + {% if advisory.aliases.all|length != 0 %} + Aliases: + {% endif %} +
    + {% for alias in advisory.aliases.all %} + {% if alias.url %} + {{ alias }} +
    + {% else %} + {{ alias }} +
    + {% endif %} + {% endfor %} + + + + {{ advisory.summary|truncatewords:20 }} + + + {% with fixed=fixed_package_details|get_item:advisory.avid %} + {% if fixed %} + {% for item in fixed %} +
    + {{ item.pkg.version }} +
    + {% if item.pkg.is_vulnerable %} + + Vulnerable + + {% else %} + + Not vulnerable + + {% endif %} +
    + {% endfor %} + {% else %} + There are no reported fixed by versions. + {% endif %} + {% endwith %} + + + {% empty %} + + + This package is not known to be subject of any advisories. + + + {% endfor %} + + {% endif %} {% elif affected_by_advisories_v2_url %}
    @@ -228,6 +291,8 @@ Aliases + + {% if grouped %} {% for advisory in fixing_advisories_v2 %} @@ -261,8 +326,43 @@ {% endfor %} - + {% else %} + + {% for advisory in fixing_advisories_v2 %} + + + + {{advisory.advisory_id }} + +
    + + + {{ advisory.summary|truncatewords:20 }} + + + {% for alias in advisory.aliases.all %} + {% if alias.url %} + {{ alias }} +
    + {% else %} + {{ alias }} +
    + {% endif %} + {% endfor %} + + + {% empty %} + + + This package is not known to fix any advisories. + + + {% endfor %} + + + {% endif %}
    {% elif fixing_advisories_v2_url %}
    diff --git a/vulnerabilities/tests/pipelines/v2_improvers/test_compute_advisory_content_hash.py b/vulnerabilities/tests/pipelines/v2_improvers/test_compute_advisory_content_hash.py deleted file mode 100644 index 5b7f0c186..000000000 --- a/vulnerabilities/tests/pipelines/v2_improvers/test_compute_advisory_content_hash.py +++ /dev/null @@ -1,88 +0,0 @@ -# -# Copyright (c) nexB Inc. and others. All rights reserved. -# VulnerableCode is a trademark of nexB Inc. -# SPDX-License-Identifier: Apache-2.0 -# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/aboutcode-org/vulnerablecode for support or download. -# See https://aboutcode.org for more information about nexB OSS projects. -# - -from unittest.mock import patch - -import pytest - -from vulnerabilities.models import AdvisoryV2 -from vulnerabilities.pipelines.v2_improvers.compute_advisory_content_hash import ( - ComputeAdvisoryContentHash, -) - -pytestmark = pytest.mark.django_db - - -@pytest.fixture -def advisory_factory(): - def _create(count, with_hash=False, start=0): - objs = [] - for i in range(start, start + count): - objs.append( - AdvisoryV2( - summary=f"summary {i}", - advisory_content_hash="existing_hash" if with_hash else None, - unique_content_id=f"unique_id_{i}", - advisory_id=f"ADV-{i}", - datasource_id="ds", - avid=f"ds/ADV-{i}", - url=f"https://example.com/ADV-{i}", - ) - ) - return AdvisoryV2.objects.bulk_create(objs) - - return _create - - -def run_pipeline(): - pipeline = ComputeAdvisoryContentHash() - pipeline.compute_advisory_content_hash() - - -@patch( - "vulnerabilities.pipelines.v2_improvers.compute_advisory_content_hash.compute_advisory_content" -) -def test_pipeline_updates_only_missing_hash(mock_compute, advisory_factory): - advisory_factory(3, with_hash=False, start=0) - advisory_factory(2, with_hash=True, start=100) - - mock_compute.return_value = "new_hash" - - run_pipeline() - - updated = 
AdvisoryV2.objects.filter(advisory_content_hash="new_hash").count() - untouched = AdvisoryV2.objects.filter(advisory_content_hash="existing_hash").count() - - assert updated == 3 - assert untouched == 2 - assert mock_compute.call_count == 3 - - -@patch( - "vulnerabilities.pipelines.v2_improvers.compute_advisory_content_hash.compute_advisory_content" -) -def test_pipeline_bulk_update_batches(mock_compute, advisory_factory): - advisory_factory(6000, with_hash=False) - - mock_compute.return_value = "batch_hash" - - run_pipeline() - - assert AdvisoryV2.objects.filter(advisory_content_hash="batch_hash").count() == 6000 - - assert mock_compute.call_count == 6000 - - -@patch( - "vulnerabilities.pipelines.v2_improvers.compute_advisory_content_hash.compute_advisory_content" -) -def test_pipeline_no_advisories(mock_compute): - run_pipeline() - - assert mock_compute.call_count == 0 diff --git a/vulnerabilities/tests/test_advisory_merge.py b/vulnerabilities/tests/test_advisory_merge.py new file mode 100644 index 000000000..ddcc3cadb --- /dev/null +++ b/vulnerabilities/tests/test_advisory_merge.py @@ -0,0 +1,192 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
+# + +import hashlib + +import pytest + +from vulnerabilities.models import AdvisoryAlias +from vulnerabilities.models import AdvisorySet +from vulnerabilities.models import AdvisorySetMember +from vulnerabilities.models import AdvisoryV2 +from vulnerabilities.models import ImpactedPackage +from vulnerabilities.models import PackageV2 +from vulnerabilities.utils import compute_advisory_content_hash +from vulnerabilities.utils import delete_and_save_advisory_set +from vulnerabilities.utils import get_advisories_from_groups +from vulnerabilities.utils import get_merged_identifier_groups +from vulnerabilities.utils import merge_advisories +from vulnerabilities.utils import merge_and_save_grouped_advisories + + +@pytest.mark.django_db +class TestAdvisoryMerge: + def create_advisory(self, advisory_id, affected_versions, fixed_versions=None, precedence=None): + unique_content_id = hashlib.sha256(advisory_id.encode()).hexdigest() + + adv = AdvisoryV2.objects.create( + datasource_id="ghsa", + advisory_id=advisory_id, + avid=f"ghsa/{advisory_id}", + unique_content_id=unique_content_id, + url="https://example.com/advisory", + date_collected="2025-07-01T00:00:00Z", + precedence=precedence, + ) + + pkg = PackageV2.objects.from_purl("pkg:pypi/sample@1.0.0") + + impact = ImpactedPackage.objects.create( + advisory=adv, + base_purl="pkg:pypi/sample", + ) + + # affected + for v in affected_versions: + p = PackageV2.objects.from_purl(f"pkg:pypi/sample@{v}") + impact.affecting_packages.add(p) + + # fixed + if fixed_versions: + for v in fixed_versions: + p = PackageV2.objects.from_purl(f"pkg:pypi/sample@{v}") + impact.fixed_by_packages.add(p) + + return adv + + def test_content_hash_same(self): + package = PackageV2.objects.from_purl("pkg:pypi/sample@1.0.0") + + adv1 = self.create_advisory("A1", ["1.0"], ["2.0"]) + adv2 = self.create_advisory("A2", ["1.0"], ["2.0"]) + + h1 = compute_advisory_content_hash(adv1, package) + h2 = compute_advisory_content_hash(adv2, package) + + assert h1 
== h2 + + def test_content_hash_different(self): + package = PackageV2.objects.from_purl("pkg:pypi/sample@1.0.0") + + adv1 = self.create_advisory("A1", ["1.0"], ["2.0"]) + adv2 = self.create_advisory("A2", ["1.0"], ["3.0"]) + + assert compute_advisory_content_hash(adv1, package) != compute_advisory_content_hash( + adv2, package + ) + + def test_identifier_merging(self): + adv1 = self.create_advisory("A1", ["1.0"]) + adv2 = self.create_advisory("A2", ["1.0"]) + + alias = AdvisoryAlias.objects.create(alias="CVE-123") + + adv1.aliases.add(alias) + adv2.aliases.add(alias) + + groups = get_merged_identifier_groups([adv1, adv2]) + + assert len(groups) == 1 + identifiers, primary, secondary = groups[0] + + assert len(secondary) == 1 + assert primary in [adv1, adv2] + + def test_transitive_merge(self): + a1 = self.create_advisory("A1", ["1.0"]) + a2 = self.create_advisory("A2", ["1.0"]) + a3 = self.create_advisory("A3", ["1.0"]) + + alias_1 = AdvisoryAlias.objects.create(alias="CVE-1") + alias_2 = AdvisoryAlias.objects.create(alias="CVE-2") + + a1.aliases.add(alias_1) + a2.aliases.add(alias_1) + a2.aliases.add(alias_2) + a3.aliases.add(alias_2) + + groups = get_merged_identifier_groups([a1, a2, a3]) + + assert len(groups) == 1 + + def test_primary_selection_by_precedence(self): + a1 = self.create_advisory("A1", ["1.0"], precedence=1) + a2 = self.create_advisory("A2", ["1.0"], precedence=5) + + alias_1 = AdvisoryAlias.objects.create(alias="CVE-1") + + a1.aliases.add(alias_1) + a2.aliases.add(alias_1) + + groups = get_merged_identifier_groups([a1, a2]) + _, primary, _ = groups[0] + + assert primary == a2 + + def test_get_advisories_from_groups(self): + adv = self.create_advisory("GHSA-ABC-123", ["1.0"]) + adv.aliases.create(alias="CVE-999") + + groups = get_merged_identifier_groups([adv]) + result = get_advisories_from_groups(groups) + + assert result[0]["identifier"] == "GHSA-ABC-123" + assert len(result[0]["aliases"]) == 1 + + def test_delete_and_save_advisory_set(self): + 
package = PackageV2.objects.from_purl("pkg:pypi/sample@1.0.0") + + adv1 = self.create_advisory("A1", ["1.0"]) + adv2 = self.create_advisory("A2", ["1.0"]) + + adv1.aliases.create(alias="CVE-1") + + groups = [(set(adv1.aliases.all()), adv1, [adv2])] + + delete_and_save_advisory_set(groups, package, relation="affecting") + + assert AdvisorySet.objects.count() == 1 + assert AdvisorySetMember.objects.count() == 2 + + advisory_set = AdvisorySet.objects.first() + members = AdvisorySetMember.objects.filter(advisory_set=advisory_set) + + assert any(m.is_primary for m in members) + assert any(not m.is_primary for m in members) + + def test_merge_and_save_integration(self): + package = PackageV2.objects.from_purl("pkg:pypi/sample@1.0.0") + + adv1 = self.create_advisory("A1", ["1.0"], ["2.0"]) + adv2 = self.create_advisory("A2", ["1.0"], ["2.0"]) + + alias = AdvisoryAlias.objects.create(alias="CVE-1") + + adv1.aliases.add(alias) + adv2.aliases.add(alias) + + result = merge_and_save_grouped_advisories( + package, + [adv1, adv2], + relation="test", + ) + + assert len(result) == 1 + assert AdvisorySet.objects.count() == 1 + assert AdvisorySetMember.objects.count() == 2 + + def test_merge_advisories_separates_different_content(self): + package = PackageV2.objects.from_purl("pkg:pypi/sample@1.0.0") + + adv1 = self.create_advisory("A1", ["1.0"], ["2.0"]) + adv2 = self.create_advisory("A2", ["1.0"], ["3.0"]) + + groups = merge_advisories([adv1, adv2], package) + + assert len(groups) == 2 diff --git a/vulnerabilities/tests/test_api_v3.py b/vulnerabilities/tests/test_api_v3.py index 6b88e5ee5..fa8a08b33 100644 --- a/vulnerabilities/tests/test_api_v3.py +++ b/vulnerabilities/tests/test_api_v3.py @@ -53,7 +53,7 @@ def test_packages_post_without_details(self): def test_packages_post_with_details(self): url = reverse("package-v3-list") - with self.assertNumQueries(23): + with self.assertNumQueries(33): response = self.client.post( url, data={ @@ -171,25 +171,6 @@ def setUp(self): 
self.client = APIClient(enforce_csrf_checks=True) - def test_packages_post_purl_with_many_advisories(self): - url = reverse("package-v3-list") - - with self.assertNumQueries(12): - response = self.client.post( - url, - data={ - "purls": ["pkg:pypi/sample@1.0.0"], - "details": True, - }, - format="json", - ) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - - results = response.data["results"] - self.assertEqual(len(results), 1) - self.assertIsNotNone(results[0]["affected_by_vulnerabilities_url"]) - def test_advisories_post(self): url = reverse("advisory-v3-list") diff --git a/vulnerabilities/utils.py b/vulnerabilities/utils.py index 2dd606a92..5f791d30b 100644 --- a/vulnerabilities/utils.py +++ b/vulnerabilities/utils.py @@ -41,6 +41,7 @@ from univers.version_range import VersionRange from aboutcode.hashid import build_vcid +from vulnerabilities.pipes.group_advisories import delete_and_save_advisory_set logger = logging.getLogger(__name__) @@ -845,59 +846,10 @@ def compute_patch_checksum(patch_text: str): return hashlib.sha512(patch_text.encode("utf-8")).hexdigest() -def group_advisories_by_content(advisories): - grouped = {} - - for advisory in advisories: - content_hash = ( - advisory.advisory_content_hash - if advisory.advisory_content_hash - else compute_advisory_content(advisory) - ) - - entry = grouped.setdefault( - content_hash, - {"primary": advisory, "secondary": set()}, - ) - - primary = entry["primary"] - - if advisory is primary: - continue - - if advisory.precedence > primary.precedence: - entry["primary"] = advisory - entry["secondary"].add(primary) - else: - entry["secondary"].add(advisory) - - return grouped - - -def compute_advisory_content(advisory_data): +def merge_advisories(advisories, package): """ - Compute a unique content hash for an advisory by normalizing its data and hashing it. 
- - :param advisory_data: An AdvisoryData object - :return: SHA-256 hash digest as content hash + Merge advisories based on their content hash and identifiers. """ - from vulnerabilities.models import AdvisoryV2 - - if isinstance(advisory_data, AdvisoryV2): - advisory_data = advisory_data.to_advisory_data() - normalized_data = { - "affected_packages": [ - pkg.to_dict() for pkg in normalize_list(advisory_data.affected_packages) if pkg - ], - } - - normalized_json = json.dumps(normalized_data, separators=(",", ":"), sort_keys=True) - content_hash = hashlib.sha256(normalized_json.encode("utf-8")).hexdigest() - - return content_hash - - -def merge_advisories(advisories, package): advisories = list(advisories) @@ -917,6 +869,8 @@ def merge_advisories(advisories, package): def compute_advisory_content_hash(adv, package): + """Compute a content hash for an advisory based on its affected and fixed packages for a given package. + This is used to determine if two advisories are the same based on their content.""" affected = [] fixed = [] @@ -943,6 +897,10 @@ def compute_advisory_content_hash(adv, package): def get_merged_identifier_groups(advisories): + """ + Merge advisories based on their identifiers (advisory_id and aliases). + Example: If two advisories share ``advisory_id`` or share an alias, they will be merged together. + """ identifier_groups = defaultdict(set) @@ -985,7 +943,7 @@ def get_merged_identifier_groups(advisories): for group in merged: identifiers = set() for adv in group: - for alias in adv.aliases.all(): + for alias in adv.aliases.all().order_by("alias"): identifiers.add(alias) primary = max(group, key=lambda a: a.precedence if a.precedence is not None else -1) @@ -995,3 +953,45 @@ def get_merged_identifier_groups(advisories): final_groups.append((identifiers, primary, secondary)) return final_groups + + +def get_advisories_from_groups(groups): + """ + Return a list of advisories from the merged groups of advisories. 
+ """ + advisories = [] + for aliases, primary, _ in groups: + identifier = primary.advisory_id.split("/")[-1] + + filtered_aliases = [alias for alias in aliases if alias.alias != identifier] + + advisories.append( + {"aliases": filtered_aliases, "advisory": primary, "identifier": identifier} + ) + + return advisories + + +def merge_and_save_grouped_advisories(package, advisories, relation): + """ + Merge advisories based on their content and identifiers and save the merged advisories to the database. + """ + groups = merge_advisories(advisories, package) + delete_and_save_advisory_set(groups, package, relation) + advisories = get_advisories_from_groups(groups) + + return advisories + + +TYPES_WITH_MULTIPLE_IMPORTERS = [ + "pypi", + "maven", + "nuget", + "golang", + "npm", + "composer", + "hex", + "cargo", + "gem", + "conan", +] diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index c8bfc6634..8051dfb35 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -39,14 +39,15 @@ from vulnerabilities.forms import VulnerabilitySearchForm from vulnerabilities.models import AdvisorySetMember from vulnerabilities.models import AdvisoryV2 -from vulnerabilities.models import ImpactedPackage from vulnerabilities.models import PipelineRun from vulnerabilities.models import PipelineSchedule from vulnerabilities.pipelines.v2_importers.epss_importer_v2 import EPSSImporterPipeline +from vulnerabilities.pipes.group_advisories import delete_and_save_advisory_set from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS -from vulnerabilities.utils import group_advisories_by_content -from vulnerabilities.utils import merge_advisories +from vulnerabilities.utils import TYPES_WITH_MULTIPLE_IMPORTERS +from vulnerabilities.utils import get_advisories_from_groups +from vulnerabilities.utils import merge_and_save_grouped_advisories from vulnerablecode import __version__ as VULNERABLECODE_VERSION from 
vulnerablecode.settings import env @@ -160,12 +161,7 @@ def get_queryset(self, query=None): on exact purl, partial purl or just name and namespace. """ query = query or self.request.GET.get("search") or "" - return ( - self.model.objects.search(query) - .prefetch_related() - .order_by("package_url") - .with_is_vulnerable() - ) + return self.model.objects.search(query).prefetch_related().with_is_vulnerable() class AffectedByAdvisoriesListView(ListView): @@ -220,57 +216,97 @@ def get_context_data(self, **kwargs): context["latest_non_vulnerable"] = latest_non_vulnerable context["package_search_form"] = PackageSearchForm(self.request.GET) + is_grouped = models.AdvisorySet.objects.filter(package=package).exists() + + if is_grouped: + context["grouped"] = True + fixed_pkg_details = get_fixed_package_details(package) + context["fixed_package_details"] = fixed_pkg_details + + affected_by_advisories_qs = models.AdvisorySet.objects.filter( + package=package, relation_type="affecting" + ).select_related("primary_advisory") + + fixing_advisories_qs = models.AdvisorySet.objects.filter( + package=package, relation_type="fixing" + ).select_related("primary_advisory") + + affected_groups = [ + (list(adv.aliases.all()), adv.primary_advisory, "") + for adv in affected_by_advisories_qs + ] + fixing_groups = [ + (list(adv.aliases.all()), adv.primary_advisory, "") for adv in fixing_advisories_qs + ] + + affected_advisories = get_advisories_from_groups(affected_groups) + fixing_advisories = get_advisories_from_groups(fixing_groups) + + context["affected_by_advisories_v2"] = affected_advisories + context["fixing_advisories_v2"] = fixing_advisories + + return context + affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( purl=package.purl - ).prefetch_related( - "aliases", - "impacted_packages__affecting_packages", - "impacted_packages__fixed_by_packages", ) fixed_by_advisories = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl( purl=package.purl - 
).prefetch_related( - "aliases", - "impacted_packages__affecting_packages", - "impacted_packages__fixed_by_packages", ) + if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: + fixed_pkg_details = get_fixed_package_details(package) + context["fixed_package_details"] = fixed_pkg_details + context["grouped"] = True + + affecting_advisories = affecting_advisories.prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", + ) + + affected_by_advisories = merge_and_save_grouped_advisories( + package, affecting_advisories, "affecting" + ) + + fixed_by_advisories = fixed_by_advisories.prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", + ) + + fixing_advisories = merge_and_save_grouped_advisories( + package, fixed_by_advisories, "fixing" + ) + + context["affected_by_advisories_v2"] = affected_by_advisories + context["fixing_advisories_v2"] = fixing_advisories + return context + + context["grouped"] = False + affected_by_advisories_url = None fixing_advisories_url = None affected_by_advisories_qs_ids = affecting_advisories.only("id") fixing_advisories_qs_ids = fixed_by_advisories.only("id") - affected_by_advisories = list(affected_by_advisories_qs_ids[:1001]) - if len(affected_by_advisories) > 1001: + affected_by_advisories = list(affected_by_advisories_qs_ids[:101]) + if len(affected_by_advisories) > 101: affected_by_advisories_url = reverse_lazy( "affected_by_advisories_v2", kwargs={"purl": package.package_url} ) context["affected_by_advisories_v2_url"] = affected_by_advisories_url - context["affected_by_advisories_v2"] = [] - context["fixed_package_details"] = {} else: - advisories = [] - fixed_pkg_details = get_fixed_package_details(package) - groups = merge_advisories(affecting_advisories, package) - for aliases, primary, _ in groups: - identifier = primary.advisory_id.split("/")[-1] - - filtered_aliases = [alias for alias in aliases if alias.alias != 
identifier] - - advisories.append( - {"aliases": filtered_aliases, "advisory": primary, "identifier": identifier} - ) - - context["affected_by_advisories_v2"] = advisories context["fixed_package_details"] = fixed_pkg_details + context["affected_by_advisories_v2"] = affecting_advisories context["affected_by_advisories_v2_url"] = None - fixing_advisories = list(fixing_advisories_qs_ids[:1001]) - if len(fixing_advisories) > 1001: + fixing_advisories = list(fixing_advisories_qs_ids[:101]) + if len(fixing_advisories) > 101: fixing_advisories_url = reverse_lazy( "fixing_advisories_v2", kwargs={"purl": package.package_url} ) @@ -278,21 +314,7 @@ def get_context_data(self, **kwargs): context["fixing_advisories_v2"] = [] else: - advisories = [] - - fixed_pkg_details = get_fixed_package_details(package) - groups = merge_advisories(fixing_advisories, package) - for aliases, primary, _ in groups: - identifier = primary.advisory_id.split("/")[-1] - - filtered_aliases = [alias for alias in aliases if alias.alias != identifier] - - advisories.append( - {"aliases": filtered_aliases, "advisory": primary, "identifier": identifier} - ) - - context["fixing_advisories_v2"] = advisories - context["fixing_advisories_v2_url"] = None + context["fixing_advisories_v2"] = fixed_by_advisories return context @@ -430,7 +452,7 @@ def get_fixed_package_details(package): pkg_map = { p.id: p - for p in models.PackageV2.objects.filter(id__in=pkg_ids).annotate( + for p in models.PackageV2.objects.filter(id__in=pkg_ids, is_ghost=False).annotate( is_vulnerable=Exists( models.ImpactedPackage.objects.filter(affecting_packages=OuterRef("pk")) ) From 8dac89edfe352eb6ff146a2949bc665c9a700375 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Mon, 30 Mar 2026 17:36:32 +0530 Subject: [PATCH 488/545] Handle None in UI Signed-off-by: Tushar Goel --- .../templates/advisory_detail.html | 21 ++++++++++++++++--- .../templates/package_details_v2.html | 4 ++++ vulnerabilities/templates/packages_v2.html | 8 ++++++- 3 
files changed, 29 insertions(+), 4 deletions(-) diff --git a/vulnerabilities/templates/advisory_detail.html b/vulnerabilities/templates/advisory_detail.html index 595412df4..5e0e61584 100644 --- a/vulnerabilities/templates/advisory_detail.html +++ b/vulnerabilities/templates/advisory_detail.html @@ -214,9 +214,20 @@ {% for severity in severities %} {{ severity.scoring_system }} - {{ severity.value }} - + + {% if severity.value is not None %} + {{ severity.value }} + {% else %} + {{ "" }} + {% endif %} + + + {% if severity.url is not None %} + {{ severity.url }} + {% else %} + {{ "" }} + {% endif %} {% empty %} @@ -483,7 +494,11 @@
    {% for severity_vector in severity_vectors %} {% if severity_vector.vector.version == '2.0' %} - Vector: {{ severity_vector.vector.vectorString }} Found at {{ severity_vector.origin }} + Vector: {{ severity_vector.vector.vectorString }} + {% if severity_vector.origin %} + Found at + {{ severity_vector.origin }} + {% endif %} diff --git a/vulnerabilities/templates/package_details_v2.html b/vulnerabilities/templates/package_details_v2.html index 8c3f62756..a6c07c352 100644 --- a/vulnerabilities/templates/package_details_v2.html +++ b/vulnerabilities/templates/package_details_v2.html @@ -118,7 +118,11 @@ Risk score diff --git a/vulnerabilities/templates/packages_v2.html b/vulnerabilities/templates/packages_v2.html index 4348575da..f114a7159 100644 --- a/vulnerabilities/templates/packages_v2.html +++ b/vulnerabilities/templates/packages_v2.html @@ -62,7 +62,13 @@ target="_self">{{ package.purl }} - + {% empty %} From b20dc39b71f64d49288ae45ebbe4cbfd7ef79250 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Mon, 30 Mar 2026 18:41:55 +0530 Subject: [PATCH 489/545] Handle large number of advisories case Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 103 ++++++++++++++++++++------------------ vulnerabilities/views.py | 69 ++++++++++++------------- 2 files changed, 89 insertions(+), 83 deletions(-) diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index ea82dcce3..fb9847a1b 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -215,6 +215,45 @@ def get_affected_by_vulnerabilities(self, package): advisories = [] + if package.type not in TYPES_WITH_MULTIPLE_IMPORTERS: + advisories_ids = advisories_qs.only("id") + + advisories_ids = list(advisories_ids[:101]) + if len(advisories_ids) > 100: + return None + + advisory_by_avid = {adv.avid: adv for adv in advisories_qs} + avids = advisory_by_avid.keys() + + impacts = ( + package.affected_in_impacts.filter(advisory__avid__in=avids) + .select_related("advisory") + 
.prefetch_related("fixed_by_packages") + ) + + impact_by_avid = {impact.advisory.avid: impact for impact in impacts} + + result = [] + + for advisory in advisories_qs: + impact = impact_by_avid.get(advisory.avid) + if not impact: + continue + + result.append( + { + "advisory_id": advisory.advisory_id.split("/")[-1], + "aliases": [alias.alias for alias in advisory.aliases.all()], + "summary": advisory.summary, + "fixed_by_packages": [pkg.purl for pkg in impact.fixed_by_packages.all()], + "severity": advisory.weighted_severity, + "exploitability": advisory.exploitability, + "risk_score": advisory.risk_score, + } + ) + + return result + is_grouped = AdvisorySet.objects.filter(package=package, relation_type="affecting").exists() if is_grouped: @@ -239,43 +278,25 @@ def get_affected_by_vulnerabilities(self, package): advisories = merge_and_save_grouped_advisories(package, advisories_qs, "affecting") return self.return_advisories_data(package, advisories_qs, advisories) - advisories_ids = advisories_qs.only("id") - - advisories_ids = list(advisories_ids[:101]) - if len(advisories_ids) > 100: - return None - - advisory_by_avid = {adv.avid: adv for adv in advisories_qs} - avids = advisory_by_avid.keys() - - impacts = ( - package.affected_in_impacts.filter(advisory__avid__in=avids) - .select_related("advisory") - .prefetch_related("fixed_by_packages") - ) - - impact_by_avid = {impact.advisory.avid: impact for impact in impacts} - - result = [] + def get_fixing_vulnerabilities(self, package): + advisories_qs = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl(package.package_url) - for advisory in advisories_qs: - impact = impact_by_avid.get(advisory.avid) - if not impact: - continue + if not package.type in TYPES_WITH_MULTIPLE_IMPORTERS: + advisories_ids = advisories_qs.only("id") - result.append( - { - "advisory_id": advisory.advisory_id.split("/")[-1], - "aliases": [alias.alias for alias in advisory.aliases.all()], - "summary": advisory.summary, - 
"fixed_by_packages": [pkg.purl for pkg in impact.fixed_by_packages.all()], - } - ) + advisories_ids = list(advisories_ids[:101]) + if len(advisories_ids) > 100: + return None - return result + results = [] - def get_fixing_vulnerabilities(self, package): - advisories_qs = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl(package.package_url) + for advisory in advisories_qs: + results.append( + { + "advisory_id": advisory.advisory_id.split("/")[-1], + } + ) + return results advisories = [] @@ -302,22 +323,6 @@ def get_fixing_vulnerabilities(self, package): advisories = merge_and_save_grouped_advisories(package, advisories_qs, "fixing") return self.return_fixing_advisories_data(advisories) - advisories_ids = advisories_qs.only("id") - - advisories_ids = list(advisories_ids[:101]) - if len(advisories_ids) > 100: - return None - - results = [] - - for advisory in advisories_qs: - results.append( - { - "advisory_id": advisory.advisory_id.split("/")[-1], - } - ) - return results - def return_fixing_advisories_data(self, advisories): result = [] for advisory in advisories: diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 8051dfb35..829ff22a7 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -216,6 +216,41 @@ def get_context_data(self, **kwargs): context["latest_non_vulnerable"] = latest_non_vulnerable context["package_search_form"] = PackageSearchForm(self.request.GET) + if not package.type in TYPES_WITH_MULTIPLE_IMPORTERS: + context["grouped"] = False + + affected_by_advisories_url = None + fixing_advisories_url = None + + affected_by_advisories_qs_ids = affecting_advisories.only("id") + fixing_advisories_qs_ids = fixed_by_advisories.only("id") + + affected_by_advisories = list(affected_by_advisories_qs_ids[:101]) + if len(affected_by_advisories) > 101: + affected_by_advisories_url = reverse_lazy( + "affected_by_advisories_v2", kwargs={"purl": package.package_url} + ) + context["affected_by_advisories_v2_url"] = 
affected_by_advisories_url + + else: + fixed_pkg_details = get_fixed_package_details(package) + context["fixed_package_details"] = fixed_pkg_details + context["affected_by_advisories_v2"] = affecting_advisories + context["affected_by_advisories_v2_url"] = None + + fixing_advisories = list(fixing_advisories_qs_ids[:101]) + if len(fixing_advisories) > 101: + fixing_advisories_url = reverse_lazy( + "fixing_advisories_v2", kwargs={"purl": package.package_url} + ) + context["fixing_advisories_v2_url"] = fixing_advisories_url + context["fixing_advisories_v2"] = [] + + else: + context["fixing_advisories_v2"] = fixed_by_advisories + + return context + is_grouped = models.AdvisorySet.objects.filter(package=package).exists() if is_grouped: @@ -284,40 +319,6 @@ def get_context_data(self, **kwargs): context["fixing_advisories_v2"] = fixing_advisories return context - context["grouped"] = False - - affected_by_advisories_url = None - fixing_advisories_url = None - - affected_by_advisories_qs_ids = affecting_advisories.only("id") - fixing_advisories_qs_ids = fixed_by_advisories.only("id") - - affected_by_advisories = list(affected_by_advisories_qs_ids[:101]) - if len(affected_by_advisories) > 101: - affected_by_advisories_url = reverse_lazy( - "affected_by_advisories_v2", kwargs={"purl": package.package_url} - ) - context["affected_by_advisories_v2_url"] = affected_by_advisories_url - - else: - fixed_pkg_details = get_fixed_package_details(package) - context["fixed_package_details"] = fixed_pkg_details - context["affected_by_advisories_v2"] = affecting_advisories - context["affected_by_advisories_v2_url"] = None - - fixing_advisories = list(fixing_advisories_qs_ids[:101]) - if len(fixing_advisories) > 101: - fixing_advisories_url = reverse_lazy( - "fixing_advisories_v2", kwargs={"purl": package.package_url} - ) - context["fixing_advisories_v2_url"] = fixing_advisories_url - context["fixing_advisories_v2"] = [] - - else: - context["fixing_advisories_v2"] = fixed_by_advisories - - 
return context - def get_object(self, queryset=None): if queryset is None: queryset = self.get_queryset() From 4f97321140c3b5d16d5ffe2a32d66b57fae7d9d4 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Mon, 30 Mar 2026 19:06:49 +0530 Subject: [PATCH 490/545] Fix views Signed-off-by: Tushar Goel --- .../templates/advisory_detail.html | 18 +++++++++++++--- vulnerabilities/views.py | 21 ++++++++++++------- 2 files changed, 29 insertions(+), 10 deletions(-) diff --git a/vulnerabilities/templates/advisory_detail.html b/vulnerabilities/templates/advisory_detail.html index 5e0e61584..90f1d6d8b 100644 --- a/vulnerabilities/templates/advisory_detail.html +++ b/vulnerabilities/templates/advisory_detail.html @@ -137,7 +137,11 @@ applications, or networks. This metric is determined automatically based on the discovery of known exploits."> Exploitability @@ -146,7 +150,11 @@ data-tooltip="Weighted severity is the highest value calculated by multiplying each severity by its corresponding weight, divided by 10." 
>Weighted Severity @@ -157,7 +165,11 @@ " >Risk diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 829ff22a7..87e0c71d6 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -217,6 +217,14 @@ def get_context_data(self, **kwargs): context["package_search_form"] = PackageSearchForm(self.request.GET) if not package.type in TYPES_WITH_MULTIPLE_IMPORTERS: + affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( + purl=package.purl + ) + + fixed_by_advisories = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl( + purl=package.purl + ) + context["grouped"] = False affected_by_advisories_url = None @@ -282,15 +290,14 @@ def get_context_data(self, **kwargs): return context - affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( - purl=package.purl - ) - - fixed_by_advisories = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl( + if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: + affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( purl=package.purl - ) + ) - if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: + fixed_by_advisories = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl( + purl=package.purl + ) fixed_pkg_details = get_fixed_package_details(package) context["fixed_package_details"] = fixed_pkg_details context["grouped"] = True From 4f2d1495c1fa40283d4e180febbed52430500cd9 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Mon, 30 Mar 2026 19:50:05 +0530 Subject: [PATCH 491/545] Fix views Signed-off-by: Tushar Goel --- vulnerabilities/views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 87e0c71d6..63d02c5b1 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -234,7 +234,7 @@ def get_context_data(self, **kwargs): fixing_advisories_qs_ids = fixed_by_advisories.only("id") affected_by_advisories = list(affected_by_advisories_qs_ids[:101]) - 
if len(affected_by_advisories) > 101: + if len(affected_by_advisories) > 100: affected_by_advisories_url = reverse_lazy( "affected_by_advisories_v2", kwargs={"purl": package.package_url} ) @@ -247,7 +247,7 @@ def get_context_data(self, **kwargs): context["affected_by_advisories_v2_url"] = None fixing_advisories = list(fixing_advisories_qs_ids[:101]) - if len(fixing_advisories) > 101: + if len(fixing_advisories) > 100: fixing_advisories_url = reverse_lazy( "fixing_advisories_v2", kwargs={"purl": package.package_url} ) From 610c205482060462014170855864108832826dc8 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Mar 2026 00:26:56 +0530 Subject: [PATCH 492/545] Add risk, severity and exploits Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 3 ++ .../templates/package_details_v2.html | 15 ++++++++++ vulnerabilities/utils.py | 29 ++++++++++++++++--- vulnerabilities/views.py | 4 +-- 4 files changed, 45 insertions(+), 6 deletions(-) diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index fb9847a1b..986096165 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -356,6 +356,9 @@ def return_advisories_data(self, package, advisories_qs, advisories): { "advisory_id": advisory["identifier"], "aliases": [alias.alias for alias in advisory["aliases"]], + "weighted_severity": advisory["weighted_severity"], + "exploitability": advisory["exploitability"], + "risk_score": advisory["risk_score"], "summary": advisory["advisory"].summary, "fixed_by_packages": [pkg.purl for pkg in impact.fixed_by_packages.all()], } diff --git a/vulnerabilities/templates/package_details_v2.html b/vulnerabilities/templates/package_details_v2.html index a6c07c352..8511348ec 100644 --- a/vulnerabilities/templates/package_details_v2.html +++ b/vulnerabilities/templates/package_details_v2.html @@ -142,6 +142,7 @@ + @@ -197,6 +198,13 @@ {% endif %} {% endwith %} + {% empty %} @@ -258,6 +266,13 @@ {% endif %} {% endwith %} + {% empty %} diff --git 
a/vulnerabilities/utils.py b/vulnerabilities/utils.py index 5f791d30b..ecf2f6878 100644 --- a/vulnerabilities/utils.py +++ b/vulnerabilities/utils.py @@ -960,13 +960,34 @@ def get_advisories_from_groups(groups): Return a list of advisories from the merged groups of advisories. """ advisories = [] - for aliases, primary, _ in groups: + weighted_severity = None + exploitability = None + risk_score = None + for aliases, primary, secondaries in groups: + severity_scores = [] + exploitability_scores = [] identifier = primary.advisory_id.split("/")[-1] - filtered_aliases = [alias for alias in aliases if alias.alias != identifier] - + severity_scores.extend([adv.weighted_severity for adv in secondaries]) + exploitability_scores.extend([adv.exploitability for adv in secondaries]) + severity_scores.append(primary.weighted_severity) + exploitability_scores.append(primary.exploitability) + if severity_scores: + weighted_severity = round(max(severity_scores), 1) + if exploitability_scores: + exploitability = max(exploitability_scores) + if exploitability and weighted_severity: + risk_score = min(float(exploitability * weighted_severity), 10.0) + risk_score = round(risk_score, 1) advisories.append( - {"aliases": filtered_aliases, "advisory": primary, "identifier": identifier} + { + "aliases": filtered_aliases, + "advisory": primary, + "identifier": identifier, + "weighted_severity": weighted_severity, + "exploitability": exploitability, + "risk_score": risk_score, + } ) return advisories diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 63d02c5b1..11852aa59 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -218,7 +218,7 @@ def get_context_data(self, **kwargs): if not package.type in TYPES_WITH_MULTIPLE_IMPORTERS: affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( - purl=package.purl + purl=package.purl ) fixed_by_advisories = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl( @@ -292,7 +292,7 @@ def 
get_context_data(self, **kwargs): if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( - purl=package.purl + purl=package.purl ) fixed_by_advisories = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl( From af98f071e6408a6c37a62dade593364db740bb67 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Mar 2026 00:30:31 +0530 Subject: [PATCH 493/545] Dedupe fixed_by_packages Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index 986096165..2803ac9b8 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -360,7 +360,7 @@ def return_advisories_data(self, package, advisories_qs, advisories): "exploitability": advisory["exploitability"], "risk_score": advisory["risk_score"], "summary": advisory["advisory"].summary, - "fixed_by_packages": [pkg.purl for pkg in impact.fixed_by_packages.all()], + "fixed_by_packages": list(set([pkg.purl for pkg in impact.fixed_by_packages.all()])), } ) From 97da322e5edeb3af477d65909d7e925a7a4b9e70 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Mar 2026 00:53:30 +0530 Subject: [PATCH 494/545] Fix severity and exploit calculation Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 50 ++++++++++++++++++++++++++++++++------- vulnerabilities/views.py | 46 ++++++++++++++++++++++++++++------- 2 files changed, 78 insertions(+), 18 deletions(-) diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index 2803ac9b8..cf8f1c3ec 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -21,6 +21,7 @@ from vulnerabilities.models import AdvisoryReference from vulnerabilities.models import AdvisorySet +from vulnerabilities.models import AdvisorySetMember from vulnerabilities.models import AdvisorySeverity from vulnerabilities.models import AdvisoryV2 from vulnerabilities.models import 
AdvisoryWeakness @@ -257,12 +258,26 @@ def get_affected_by_vulnerabilities(self, package): is_grouped = AdvisorySet.objects.filter(package=package, relation_type="affecting").exists() if is_grouped: - affected_by_advisories_qs = AdvisorySet.objects.filter( - package=package, relation_type="affecting" - ).select_related("primary_advisory") + affected_by_advisories_qs = ( + AdvisorySet.objects.filter(package=package, relation_type="affecting") + .select_related("primary_advisory") + .prefetch_related( + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + "advisory" + ), + to_attr="secondary_members", + ) + ) + ) affected_groups = [ - (list(adv.aliases.all()), adv.primary_advisory, "") + ( + list(adv.aliases.all()), + adv.primary_advisory, + [member.advisory for member in adv.secondary_members], + ) for adv in affected_by_advisories_qs ] @@ -303,12 +318,27 @@ def get_fixing_vulnerabilities(self, package): is_grouped = AdvisorySet.objects.filter(package=package, relation_type="fixing").exists() if is_grouped: - fixing_advisories_qs = AdvisorySet.objects.filter( - package=package, relation_type="fixing" - ).select_related("primary_advisory") + fixing_advisories_qs = ( + AdvisorySet.objects.filter(package=package, relation_type="fixing") + .select_related("primary_advisory") + .prefetch_related( + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + "advisory" + ), + to_attr="secondary_members", + ) + ) + ) fixing_groups = [ - (list(adv.aliases.all()), adv.primary_advisory, "") for adv in fixing_advisories_qs + ( + list(adv.aliases.all()), + adv.primary_advisory, + [member.advisory for member in adv.secondary_members], + ) + for adv in fixing_advisories_qs ] advisories = get_advisories_from_groups(fixing_groups) @@ -360,7 +390,9 @@ def return_advisories_data(self, package, advisories_qs, advisories): "exploitability": advisory["exploitability"], "risk_score": 
advisory["risk_score"], "summary": advisory["advisory"].summary, - "fixed_by_packages": list(set([pkg.purl for pkg in impact.fixed_by_packages.all()])), + "fixed_by_packages": list( + set([pkg.purl for pkg in impact.fixed_by_packages.all()]) + ), } ) diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 11852aa59..c88c437b5 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -42,7 +42,6 @@ from vulnerabilities.models import PipelineRun from vulnerabilities.models import PipelineSchedule from vulnerabilities.pipelines.v2_importers.epss_importer_v2 import EPSSImporterPipeline -from vulnerabilities.pipes.group_advisories import delete_and_save_advisory_set from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS from vulnerabilities.utils import TYPES_WITH_MULTIPLE_IMPORTERS @@ -266,20 +265,49 @@ def get_context_data(self, **kwargs): fixed_pkg_details = get_fixed_package_details(package) context["fixed_package_details"] = fixed_pkg_details - affected_by_advisories_qs = models.AdvisorySet.objects.filter( - package=package, relation_type="affecting" - ).select_related("primary_advisory") + affected_by_advisories_qs = ( + models.AdvisorySet.objects.filter(package=package, relation_type="affecting") + .select_related("primary_advisory") + .prefetch_related( + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + "advisory" + ), + to_attr="secondary_members", + ) + ) + ) - fixing_advisories_qs = models.AdvisorySet.objects.filter( - package=package, relation_type="fixing" - ).select_related("primary_advisory") + fixing_advisories_qs = ( + models.AdvisorySet.objects.filter(package=package, relation_type="fixing") + .select_related("primary_advisory") + .prefetch_related( + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + "advisory" + ), + to_attr="secondary_members", + ) + ) + ) 
affected_groups = [ - (list(adv.aliases.all()), adv.primary_advisory, "") + ( + list(adv.aliases.all()), + adv.primary_advisory, + [a.advisory for a in adv.secondary_members], + ) for adv in affected_by_advisories_qs ] fixing_groups = [ - (list(adv.aliases.all()), adv.primary_advisory, "") for adv in fixing_advisories_qs + ( + list(adv.aliases.all()), + adv.primary_advisory, + [a.advisory for a in adv.secondary_members], + ) + for adv in fixing_advisories_qs ] affected_advisories = get_advisories_from_groups(affected_groups) From 07433dc7d65552ff9d6cb50c10a2f68c7e5e77fb Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Mar 2026 16:13:12 +0530 Subject: [PATCH 495/545] Fix grouping Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 49 +++++++++------ vulnerabilities/models.py | 26 ++++++++ .../group_advisories_for_packages.py | 7 ++- vulnerabilities/pipes/group_advisories.py | 12 ++-- vulnerabilities/tests/test_advisory_merge.py | 7 ++- vulnerabilities/utils.py | 61 ++++++++++++------- vulnerabilities/views.py | 27 +++++--- 7 files changed, 127 insertions(+), 62 deletions(-) diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index cf8f1c3ec..ea1586394 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -7,6 +7,7 @@ # See https://aboutcode.org for more information about nexB OSS projects. 
# +from typing import List from urllib.parse import urlencode from django.db.models import Exists @@ -25,6 +26,8 @@ from vulnerabilities.models import AdvisorySeverity from vulnerabilities.models import AdvisoryV2 from vulnerabilities.models import AdvisoryWeakness +from vulnerabilities.models import Group +from vulnerabilities.models import GroupedAdvisory from vulnerabilities.models import ImpactedPackageAffecting from vulnerabilities.models import PackageV2 from vulnerabilities.throttling import PermissionBasedUserRateThrottle @@ -273,15 +276,15 @@ def get_affected_by_vulnerabilities(self, package): ) affected_groups = [ - ( - list(adv.aliases.all()), - adv.primary_advisory, - [member.advisory for member in adv.secondary_members], + Group( + aliases=list(adv.aliases.all()), + primary_advisory=adv.primary_advisory, + secondaries=[member.advisory for member in adv.secondary_members], ) for adv in affected_by_advisories_qs ] - advisories = get_advisories_from_groups(affected_groups) + advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) return self.return_advisories_data(package, advisories_qs, advisories) if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: @@ -290,7 +293,9 @@ def get_affected_by_vulnerabilities(self, package): "impacted_packages__affecting_packages", "impacted_packages__fixed_by_packages", ) - advisories = merge_and_save_grouped_advisories(package, advisories_qs, "affecting") + advisories: List[GroupedAdvisory] = merge_and_save_grouped_advisories( + package, advisories_qs, "affecting" + ) return self.return_advisories_data(package, advisories_qs, advisories) def get_fixing_vulnerabilities(self, package): @@ -333,15 +338,15 @@ def get_fixing_vulnerabilities(self, package): ) fixing_groups = [ - ( - list(adv.aliases.all()), - adv.primary_advisory, - [member.advisory for member in adv.secondary_members], + Group( + aliases=list(adv.aliases.all()), + primary_advisory=adv.primary_advisory, + secondaries=[member.advisory for 
member in adv.secondary_members], ) for adv in fixing_advisories_qs ] - advisories = get_advisories_from_groups(fixing_groups) + advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) return self.return_fixing_advisories_data(advisories) if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: @@ -350,15 +355,18 @@ def get_fixing_vulnerabilities(self, package): "impacted_packages__affecting_packages", "impacted_packages__fixed_by_packages", ) - advisories = merge_and_save_grouped_advisories(package, advisories_qs, "fixing") + advisories: List[GroupedAdvisory] = merge_and_save_grouped_advisories( + package, advisories_qs, "fixing" + ) return self.return_fixing_advisories_data(advisories) def return_fixing_advisories_data(self, advisories): result = [] for advisory in advisories: + assert isinstance(advisory, GroupedAdvisory) result.append( { - "advisory_id": advisory["identifier"], + "advisory_id": advisory.identifier, } ) @@ -378,18 +386,19 @@ def return_advisories_data(self, package, advisories_qs, advisories): result = [] for advisory in advisories: - impact = impact_by_avid.get(advisory["advisory"].avid) + assert isinstance(advisory, GroupedAdvisory) + impact = impact_by_avid.get(advisory.advisory.avid) if not impact: continue result.append( { - "advisory_id": advisory["identifier"], - "aliases": [alias.alias for alias in advisory["aliases"]], - "weighted_severity": advisory["weighted_severity"], - "exploitability": advisory["exploitability"], - "risk_score": advisory["risk_score"], - "summary": advisory["advisory"].summary, + "advisory_id": advisory.identifier, + "aliases": [alias.alias for alias in advisory.aliases], + "weighted_severity": advisory.weighted_severity, + "exploitability": advisory.exploitability, + "risk_score": advisory.risk_score, + "summary": advisory.advisory.summary, "fixed_by_packages": list( set([pkg.purl for pkg in impact.fixed_by_packages.all()]) ), diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 
f51a92dbd..45d8acf55 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -20,6 +20,9 @@ from operator import attrgetter from traceback import format_exc as traceback_format_exc from typing import List +from typing import NamedTuple +from typing import Optional +from typing import Set from typing import Union from urllib.parse import urljoin @@ -3714,3 +3717,26 @@ def __str__(self): class Meta: unique_together = ("vector", "source_advisory") + + +class Group(NamedTuple): + """ + A Group of advisories that have been merged together based on their content and identifiers. + """ + + aliases: Set[AdvisoryAlias] + primary: AdvisoryV2 + secondaries: List[AdvisoryV2] + + +class GroupedAdvisory(NamedTuple): + """ + A GroupedAdvisory represents a single advisory that has been grouped with its aliases and related advisories. + """ + + aliases: Set[AdvisoryAlias] + advisory: AdvisoryV2 + identifier: str + weighted_severity: Optional[float] + exploitability: Optional[float] + risk_score: Optional[float] diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index d2c8f6296..db49447ff 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -7,7 +7,10 @@ # See https://aboutcode.org for more information about nexB OSS projects. 
# +from typing import List + from vulnerabilities.models import AdvisoryV2 +from vulnerabilities.models import Group from vulnerabilities.models import PackageV2 from vulnerabilities.pipelines import VulnerableCodePipeline from vulnerabilities.pipes.group_advisories import delete_and_save_advisory_set @@ -48,8 +51,8 @@ def group_advisoris_for_packages(logger=None): ) try: - affected_groups = merge_advisories(affecting_advisories, package) - fixed_by_groups = merge_advisories(fixed_by_advisories, package) + affected_groups: List[Group] = merge_advisories(affecting_advisories, package) + fixed_by_groups: List[Group] = merge_advisories(fixed_by_advisories, package) delete_and_save_advisory_set(affected_groups, package, relation="affecting") delete_and_save_advisory_set(fixed_by_groups, package, relation="fixing") except Exception as e: diff --git a/vulnerabilities/pipes/group_advisories.py b/vulnerabilities/pipes/group_advisories.py index d66365706..983ac3386 100644 --- a/vulnerabilities/pipes/group_advisories.py +++ b/vulnerabilities/pipes/group_advisories.py @@ -14,31 +14,33 @@ def delete_and_save_advisory_set(groups, package, relation=None): from vulnerabilities.models import AdvisorySet from vulnerabilities.models import AdvisorySetMember + from vulnerabilities.models import Group AdvisorySet.objects.filter(package=package, relation_type=relation).delete() membership_to_create = [] - for identifiers, primary, secondary in groups: + for group in groups: + assert isinstance(group, Group) advisory_set = AdvisorySet.objects.create( package=package, relation_type=relation, - primary_advisory=primary, + primary_advisory=group.primary, ) - advisory_set.aliases.add(*identifiers) + advisory_set.aliases.add(*group.aliases) advisory_set.save() membership_to_create.append( AdvisorySetMember( advisory_set=advisory_set, - advisory=primary, + advisory=group.primary, is_primary=True, ) ) - for adv in secondary: + for adv in group.secondaries: membership_to_create.append( 
AdvisorySetMember( advisory_set=advisory_set, diff --git a/vulnerabilities/tests/test_advisory_merge.py b/vulnerabilities/tests/test_advisory_merge.py index ddcc3cadb..08b586ff3 100644 --- a/vulnerabilities/tests/test_advisory_merge.py +++ b/vulnerabilities/tests/test_advisory_merge.py @@ -15,6 +15,7 @@ from vulnerabilities.models import AdvisorySet from vulnerabilities.models import AdvisorySetMember from vulnerabilities.models import AdvisoryV2 +from vulnerabilities.models import Group from vulnerabilities.models import ImpactedPackage from vulnerabilities.models import PackageV2 from vulnerabilities.utils import compute_advisory_content_hash @@ -136,8 +137,8 @@ def test_get_advisories_from_groups(self): groups = get_merged_identifier_groups([adv]) result = get_advisories_from_groups(groups) - assert result[0]["identifier"] == "GHSA-ABC-123" - assert len(result[0]["aliases"]) == 1 + assert result[0].identifier == "GHSA-ABC-123" + assert len(result[0].aliases) == 1 def test_delete_and_save_advisory_set(self): package = PackageV2.objects.from_purl("pkg:pypi/sample@1.0.0") @@ -147,7 +148,7 @@ def test_delete_and_save_advisory_set(self): adv1.aliases.create(alias="CVE-1") - groups = [(set(adv1.aliases.all()), adv1, [adv2])] + groups = [Group(aliases=set(adv1.aliases.all()), primary=adv1, secondaries=[adv2])] delete_and_save_advisory_set(groups, package, relation="affecting") diff --git a/vulnerabilities/utils.py b/vulnerabilities/utils.py index ecf2f6878..e8a13821e 100644 --- a/vulnerabilities/utils.py +++ b/vulnerabilities/utils.py @@ -20,7 +20,9 @@ from functools import total_ordering from http import HTTPStatus from typing import List +from typing import NamedTuple from typing import Optional +from typing import Set from typing import Tuple from typing import Union from unittest.mock import MagicMock @@ -850,6 +852,7 @@ def merge_advisories(advisories, package): """ Merge advisories based on their content hash and identifiers. 
""" + from vulnerabilities.models import Group advisories = list(advisories) @@ -859,7 +862,7 @@ def merge_advisories(advisories, package): content_hash = compute_advisory_content_hash(adv, package) content_hash_map[content_hash].append(adv) - final_groups = [] + final_groups: List[Group] = [] for group in content_hash_map.values(): groups = get_merged_identifier_groups(group) @@ -901,6 +904,7 @@ def get_merged_identifier_groups(advisories): Merge advisories based on their identifiers (advisory_id and aliases). Example: If two advisories share ``advisory_id`` or share an alias, they will be merged together. """ + from vulnerabilities.models import Group identifier_groups = defaultdict(set) @@ -938,7 +942,7 @@ def get_merged_identifier_groups(advisories): if adv not in all_grouped: merged.append({adv}) - final_groups = [] + final_groups: List[Group] = [] for group in merged: identifiers = set() @@ -950,7 +954,7 @@ def get_merged_identifier_groups(advisories): secondary = [a for a in group if a != primary] - final_groups.append((identifiers, primary, secondary)) + final_groups.append(Group(aliases=identifiers, primary=primary, secondaries=secondary)) return final_groups @@ -959,35 +963,48 @@ def get_advisories_from_groups(groups): """ Return a list of advisories from the merged groups of advisories. 
""" + from vulnerabilities.models import Group + from vulnerabilities.models import GroupedAdvisory + advisories = [] - weighted_severity = None - exploitability = None - risk_score = None - for aliases, primary, secondaries in groups: + + for group in groups: + + assert isinstance(group, Group) + weighted_severity = None + exploitability = None + risk_score = None + severity_scores = [] - exploitability_scores = [] - identifier = primary.advisory_id.split("/")[-1] - filtered_aliases = [alias for alias in aliases if alias.alias != identifier] - severity_scores.extend([adv.weighted_severity for adv in secondaries]) - exploitability_scores.extend([adv.exploitability for adv in secondaries]) - severity_scores.append(primary.weighted_severity) - exploitability_scores.append(primary.exploitability) + severity_scores.append(group.primary.weighted_severity or 0.0) + severity_scores.extend([adv.weighted_severity or 0.0 for adv in group.secondaries]) + if severity_scores: weighted_severity = round(max(severity_scores), 1) + + exploitability_scores = [] + exploitability_scores.append(group.primary.exploitability or 0.0) + exploitability_scores.extend([adv.exploitability or 0.0 for adv in group.secondaries]) + if exploitability_scores: exploitability = max(exploitability_scores) + if exploitability and weighted_severity: risk_score = min(float(exploitability * weighted_severity), 10.0) risk_score = round(risk_score, 1) + + identifier = group.primary.advisory_id.split("/")[-1] + filtered_aliases = [alias for alias in group.aliases if alias.alias != identifier] + advisories.append( - { - "aliases": filtered_aliases, - "advisory": primary, - "identifier": identifier, - "weighted_severity": weighted_severity, - "exploitability": exploitability, - "risk_score": risk_score, - } + GroupedAdvisory( + aliases=filtered_aliases, + advisory=group.primary, + identifier=identifier, + weighted_severity=weighted_severity, + exploitability=exploitability, + risk_score=risk_score, + ) ) return 
advisories diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index c88c437b5..f9274a18d 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -8,6 +8,7 @@ # import logging from collections import defaultdict +from typing import List from cvss.exceptions import CVSS2MalformedError from cvss.exceptions import CVSS3MalformedError @@ -39,6 +40,8 @@ from vulnerabilities.forms import VulnerabilitySearchForm from vulnerabilities.models import AdvisorySetMember from vulnerabilities.models import AdvisoryV2 +from vulnerabilities.models import Group +from vulnerabilities.models import GroupedAdvisory from vulnerabilities.models import PipelineRun from vulnerabilities.models import PipelineSchedule from vulnerabilities.pipelines.v2_importers.epss_importer_v2 import EPSSImporterPipeline @@ -295,23 +298,27 @@ def get_context_data(self, **kwargs): affected_groups = [ ( - list(adv.aliases.all()), - adv.primary_advisory, - [a.advisory for a in adv.secondary_members], + Group( + aliases=list(adv.aliases.all()), + primary=adv.primary_advisory, + secondaries=[a.advisory for a in adv.secondary_members], + ) ) for adv in affected_by_advisories_qs ] fixing_groups = [ ( - list(adv.aliases.all()), - adv.primary_advisory, - [a.advisory for a in adv.secondary_members], + Group( + aliases=list(adv.aliases.all()), + primary=adv.primary_advisory, + secondaries=[a.advisory for a in adv.secondary_members], + ) ) for adv in fixing_advisories_qs ] - affected_advisories = get_advisories_from_groups(affected_groups) - fixing_advisories = get_advisories_from_groups(fixing_groups) + affected_advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) + fixing_advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) context["affected_by_advisories_v2"] = affected_advisories context["fixing_advisories_v2"] = fixing_advisories @@ -336,7 +343,7 @@ def get_context_data(self, **kwargs): "impacted_packages__fixed_by_packages", ) - 
affected_by_advisories = merge_and_save_grouped_advisories( + affected_by_advisories: List[GroupedAdvisory] = merge_and_save_grouped_advisories( package, affecting_advisories, "affecting" ) @@ -346,7 +353,7 @@ def get_context_data(self, **kwargs): "impacted_packages__fixed_by_packages", ) - fixing_advisories = merge_and_save_grouped_advisories( + fixing_advisories: List[GroupedAdvisory] = merge_and_save_grouped_advisories( package, fixed_by_advisories, "fixing" ) From cfb2d7d00290656eec944d4e1a3aacef9257f038 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Mar 2026 16:20:58 +0530 Subject: [PATCH 496/545] Fix API Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index ea1586394..a15d5a0cd 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -278,7 +278,7 @@ def get_affected_by_vulnerabilities(self, package): affected_groups = [ Group( aliases=list(adv.aliases.all()), - primary_advisory=adv.primary_advisory, + primary=adv.primary_advisory, secondaries=[member.advisory for member in adv.secondary_members], ) for adv in affected_by_advisories_qs @@ -340,7 +340,7 @@ def get_fixing_vulnerabilities(self, package): fixing_groups = [ Group( aliases=list(adv.aliases.all()), - primary_advisory=adv.primary_advisory, + primary=adv.primary_advisory, secondaries=[member.advisory for member in adv.secondary_members], ) for adv in fixing_advisories_qs From 28c5c638c1d7eef23ee0bb6d73d08eb3b16ae4b8 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Mar 2026 16:37:50 +0530 Subject: [PATCH 497/545] Ignore goruped case Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 112 +++++++++++++++++----------------- vulnerabilities/views.py | 122 +++++++++++++++++++------------------- 2 files changed, 117 insertions(+), 117 deletions(-) diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index 
a15d5a0cd..0d1df5418 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -258,34 +258,34 @@ def get_affected_by_vulnerabilities(self, package): return result - is_grouped = AdvisorySet.objects.filter(package=package, relation_type="affecting").exists() - - if is_grouped: - affected_by_advisories_qs = ( - AdvisorySet.objects.filter(package=package, relation_type="affecting") - .select_related("primary_advisory") - .prefetch_related( - Prefetch( - "members", - queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - "advisory" - ), - to_attr="secondary_members", - ) - ) - ) - - affected_groups = [ - Group( - aliases=list(adv.aliases.all()), - primary=adv.primary_advisory, - secondaries=[member.advisory for member in adv.secondary_members], - ) - for adv in affected_by_advisories_qs - ] - - advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) - return self.return_advisories_data(package, advisories_qs, advisories) + # is_grouped = AdvisorySet.objects.filter(package=package, relation_type="affecting").exists() + + # if is_grouped: + # affected_by_advisories_qs = ( + # AdvisorySet.objects.filter(package=package, relation_type="affecting") + # .select_related("primary_advisory") + # .prefetch_related( + # Prefetch( + # "members", + # queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + # "advisory" + # ), + # to_attr="secondary_members", + # ) + # ) + # ) + + # affected_groups = [ + # Group( + # aliases=list(adv.aliases.all()), + # primary=adv.primary_advisory, + # secondaries=[member.advisory for member in adv.secondary_members], + # ) + # for adv in affected_by_advisories_qs + # ] + + # advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) + # return self.return_advisories_data(package, advisories_qs, advisories) if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: advisories_qs = advisories_qs.prefetch_related( @@ -320,34 +320,34 @@ def 
get_fixing_vulnerabilities(self, package): advisories = [] - is_grouped = AdvisorySet.objects.filter(package=package, relation_type="fixing").exists() - - if is_grouped: - fixing_advisories_qs = ( - AdvisorySet.objects.filter(package=package, relation_type="fixing") - .select_related("primary_advisory") - .prefetch_related( - Prefetch( - "members", - queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - "advisory" - ), - to_attr="secondary_members", - ) - ) - ) - - fixing_groups = [ - Group( - aliases=list(adv.aliases.all()), - primary=adv.primary_advisory, - secondaries=[member.advisory for member in adv.secondary_members], - ) - for adv in fixing_advisories_qs - ] - - advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) - return self.return_fixing_advisories_data(advisories) + # is_grouped = AdvisorySet.objects.filter(package=package, relation_type="fixing").exists() + + # if is_grouped: + # fixing_advisories_qs = ( + # AdvisorySet.objects.filter(package=package, relation_type="fixing") + # .select_related("primary_advisory") + # .prefetch_related( + # Prefetch( + # "members", + # queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + # "advisory" + # ), + # to_attr="secondary_members", + # ) + # ) + # ) + + # fixing_groups = [ + # Group( + # aliases=list(adv.aliases.all()), + # primary=adv.primary_advisory, + # secondaries=[member.advisory for member in adv.secondary_members], + # ) + # for adv in fixing_advisories_qs + # ] + + # advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) + # return self.return_fixing_advisories_data(advisories) if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: advisories_qs = advisories_qs.prefetch_related( diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index f9274a18d..a9d599b49 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -261,69 +261,69 @@ def get_context_data(self, **kwargs): return context 
- is_grouped = models.AdvisorySet.objects.filter(package=package).exists() - - if is_grouped: - context["grouped"] = True - fixed_pkg_details = get_fixed_package_details(package) - context["fixed_package_details"] = fixed_pkg_details - - affected_by_advisories_qs = ( - models.AdvisorySet.objects.filter(package=package, relation_type="affecting") - .select_related("primary_advisory") - .prefetch_related( - Prefetch( - "members", - queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - "advisory" - ), - to_attr="secondary_members", - ) - ) - ) - - fixing_advisories_qs = ( - models.AdvisorySet.objects.filter(package=package, relation_type="fixing") - .select_related("primary_advisory") - .prefetch_related( - Prefetch( - "members", - queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - "advisory" - ), - to_attr="secondary_members", - ) - ) - ) - - affected_groups = [ - ( - Group( - aliases=list(adv.aliases.all()), - primary=adv.primary_advisory, - secondaries=[a.advisory for a in adv.secondary_members], - ) - ) - for adv in affected_by_advisories_qs - ] - fixing_groups = [ - ( - Group( - aliases=list(adv.aliases.all()), - primary=adv.primary_advisory, - secondaries=[a.advisory for a in adv.secondary_members], - ) - ) - for adv in fixing_advisories_qs - ] - - affected_advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) - fixing_advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) + # is_grouped = models.AdvisorySet.objects.filter(package=package).exists() + + # if is_grouped: + # context["grouped"] = True + # fixed_pkg_details = get_fixed_package_details(package) + # context["fixed_package_details"] = fixed_pkg_details + + # affected_by_advisories_qs = ( + # models.AdvisorySet.objects.filter(package=package, relation_type="affecting") + # .select_related("primary_advisory") + # .prefetch_related( + # Prefetch( + # "members", + # 
queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + # "advisory" + # ), + # to_attr="secondary_members", + # ) + # ) + # ) - context["affected_by_advisories_v2"] = affected_advisories - context["fixing_advisories_v2"] = fixing_advisories + # fixing_advisories_qs = ( + # models.AdvisorySet.objects.filter(package=package, relation_type="fixing") + # .select_related("primary_advisory") + # .prefetch_related( + # Prefetch( + # "members", + # queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + # "advisory" + # ), + # to_attr="secondary_members", + # ) + # ) + # ) - return context + # affected_groups = [ + # ( + # Group( + # aliases=list(adv.aliases.all()), + # primary=adv.primary_advisory, + # secondaries=[a.advisory for a in adv.secondary_members], + # ) + # ) + # for adv in affected_by_advisories_qs + # ] + # fixing_groups = [ + # ( + # Group( + # aliases=list(adv.aliases.all()), + # primary=adv.primary_advisory, + # secondaries=[a.advisory for a in adv.secondary_members], + # ) + # ) + # for adv in fixing_advisories_qs + # ] + + # affected_advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) + # fixing_advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) + + # context["affected_by_advisories_v2"] = affected_advisories + # context["fixing_advisories_v2"] = fixing_advisories + + # return context if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( From 7c4859e732b95b55f2df46ee2e023796cce2731d Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Mar 2026 16:54:06 +0530 Subject: [PATCH 498/545] Revert grouping Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 112 +++++++++++++++++----------------- vulnerabilities/views.py | 122 +++++++++++++++++++------------------- 2 files changed, 117 insertions(+), 117 deletions(-) diff --git a/vulnerabilities/api_v3.py 
b/vulnerabilities/api_v3.py index 0d1df5418..a15d5a0cd 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -258,34 +258,34 @@ def get_affected_by_vulnerabilities(self, package): return result - # is_grouped = AdvisorySet.objects.filter(package=package, relation_type="affecting").exists() - - # if is_grouped: - # affected_by_advisories_qs = ( - # AdvisorySet.objects.filter(package=package, relation_type="affecting") - # .select_related("primary_advisory") - # .prefetch_related( - # Prefetch( - # "members", - # queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - # "advisory" - # ), - # to_attr="secondary_members", - # ) - # ) - # ) - - # affected_groups = [ - # Group( - # aliases=list(adv.aliases.all()), - # primary=adv.primary_advisory, - # secondaries=[member.advisory for member in adv.secondary_members], - # ) - # for adv in affected_by_advisories_qs - # ] - - # advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) - # return self.return_advisories_data(package, advisories_qs, advisories) + is_grouped = AdvisorySet.objects.filter(package=package, relation_type="affecting").exists() + + if is_grouped: + affected_by_advisories_qs = ( + AdvisorySet.objects.filter(package=package, relation_type="affecting") + .select_related("primary_advisory") + .prefetch_related( + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + "advisory" + ), + to_attr="secondary_members", + ) + ) + ) + + affected_groups = [ + Group( + aliases=list(adv.aliases.all()), + primary=adv.primary_advisory, + secondaries=[member.advisory for member in adv.secondary_members], + ) + for adv in affected_by_advisories_qs + ] + + advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) + return self.return_advisories_data(package, advisories_qs, advisories) if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: advisories_qs = advisories_qs.prefetch_related( @@ 
-320,34 +320,34 @@ def get_fixing_vulnerabilities(self, package): advisories = [] - # is_grouped = AdvisorySet.objects.filter(package=package, relation_type="fixing").exists() - - # if is_grouped: - # fixing_advisories_qs = ( - # AdvisorySet.objects.filter(package=package, relation_type="fixing") - # .select_related("primary_advisory") - # .prefetch_related( - # Prefetch( - # "members", - # queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - # "advisory" - # ), - # to_attr="secondary_members", - # ) - # ) - # ) - - # fixing_groups = [ - # Group( - # aliases=list(adv.aliases.all()), - # primary=adv.primary_advisory, - # secondaries=[member.advisory for member in adv.secondary_members], - # ) - # for adv in fixing_advisories_qs - # ] - - # advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) - # return self.return_fixing_advisories_data(advisories) + is_grouped = AdvisorySet.objects.filter(package=package, relation_type="fixing").exists() + + if is_grouped: + fixing_advisories_qs = ( + AdvisorySet.objects.filter(package=package, relation_type="fixing") + .select_related("primary_advisory") + .prefetch_related( + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + "advisory" + ), + to_attr="secondary_members", + ) + ) + ) + + fixing_groups = [ + Group( + aliases=list(adv.aliases.all()), + primary=adv.primary_advisory, + secondaries=[member.advisory for member in adv.secondary_members], + ) + for adv in fixing_advisories_qs + ] + + advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) + return self.return_fixing_advisories_data(advisories) if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: advisories_qs = advisories_qs.prefetch_related( diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index a9d599b49..f9274a18d 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -261,69 +261,69 @@ def get_context_data(self, 
**kwargs): return context - # is_grouped = models.AdvisorySet.objects.filter(package=package).exists() - - # if is_grouped: - # context["grouped"] = True - # fixed_pkg_details = get_fixed_package_details(package) - # context["fixed_package_details"] = fixed_pkg_details - - # affected_by_advisories_qs = ( - # models.AdvisorySet.objects.filter(package=package, relation_type="affecting") - # .select_related("primary_advisory") - # .prefetch_related( - # Prefetch( - # "members", - # queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - # "advisory" - # ), - # to_attr="secondary_members", - # ) - # ) - # ) + is_grouped = models.AdvisorySet.objects.filter(package=package).exists() - # fixing_advisories_qs = ( - # models.AdvisorySet.objects.filter(package=package, relation_type="fixing") - # .select_related("primary_advisory") - # .prefetch_related( - # Prefetch( - # "members", - # queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - # "advisory" - # ), - # to_attr="secondary_members", - # ) - # ) - # ) + if is_grouped: + context["grouped"] = True + fixed_pkg_details = get_fixed_package_details(package) + context["fixed_package_details"] = fixed_pkg_details + + affected_by_advisories_qs = ( + models.AdvisorySet.objects.filter(package=package, relation_type="affecting") + .select_related("primary_advisory") + .prefetch_related( + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + "advisory" + ), + to_attr="secondary_members", + ) + ) + ) + + fixing_advisories_qs = ( + models.AdvisorySet.objects.filter(package=package, relation_type="fixing") + .select_related("primary_advisory") + .prefetch_related( + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( + "advisory" + ), + to_attr="secondary_members", + ) + ) + ) + + affected_groups = [ + ( + Group( + aliases=list(adv.aliases.all()), + primary=adv.primary_advisory, + 
secondaries=[a.advisory for a in adv.secondary_members], + ) + ) + for adv in affected_by_advisories_qs + ] + fixing_groups = [ + ( + Group( + aliases=list(adv.aliases.all()), + primary=adv.primary_advisory, + secondaries=[a.advisory for a in adv.secondary_members], + ) + ) + for adv in fixing_advisories_qs + ] + + affected_advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) + fixing_advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) - # affected_groups = [ - # ( - # Group( - # aliases=list(adv.aliases.all()), - # primary=adv.primary_advisory, - # secondaries=[a.advisory for a in adv.secondary_members], - # ) - # ) - # for adv in affected_by_advisories_qs - # ] - # fixing_groups = [ - # ( - # Group( - # aliases=list(adv.aliases.all()), - # primary=adv.primary_advisory, - # secondaries=[a.advisory for a in adv.secondary_members], - # ) - # ) - # for adv in fixing_advisories_qs - # ] - - # affected_advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) - # fixing_advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) - - # context["affected_by_advisories_v2"] = affected_advisories - # context["fixing_advisories_v2"] = fixing_advisories - - # return context + context["affected_by_advisories_v2"] = affected_advisories + context["fixing_advisories_v2"] = fixing_advisories + + return context if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( From 8f17b7ea3cc66f451657fbf17dc827de4ae0020d Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Mar 2026 18:20:45 +0530 Subject: [PATCH 499/545] Change advisory ID for pypa importer Signed-off-by: Tushar Goel --- vulnerabilities/pipelines/v2_importers/pypa_importer.py | 3 +++ vulnerabilities/pipes/osv_v2.py | 9 +++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/vulnerabilities/pipelines/v2_importers/pypa_importer.py 
b/vulnerabilities/pipelines/v2_importers/pypa_importer.py index 7a80ed70f..142c8a385 100644 --- a/vulnerabilities/pipelines/v2_importers/pypa_importer.py +++ b/vulnerabilities/pipelines/v2_importers/pypa_importer.py @@ -59,11 +59,14 @@ def collect_advisories(self) -> Iterable[AdvisoryDataV2]: ) advisory_text = advisory.read_text() advisory_dict = saneyaml.load(advisory_text) + advisory_path = advisory.relative_to(base_directory) + advisory_id = advisory_path.parent.stem + "/" + advisory_path.stem yield parse_advisory_data_v3( raw_data=advisory_dict, supported_ecosystems=["pypi"], advisory_url=advisory_url, advisory_text=advisory_text, + advisory_id=advisory_id, ) def clean_downloads(self): diff --git a/vulnerabilities/pipes/osv_v2.py b/vulnerabilities/pipes/osv_v2.py index e70ba4a4a..65b5a5904 100644 --- a/vulnerabilities/pipes/osv_v2.py +++ b/vulnerabilities/pipes/osv_v2.py @@ -59,13 +59,18 @@ def parse_advisory_data_v3( - raw_data: dict, supported_ecosystems, advisory_url: str, advisory_text: str + raw_data: dict, + supported_ecosystems, + advisory_url: str, + advisory_text: str, + advisory_id: Optional[str] = None, ) -> Optional[AdvisoryDataV2]: """ Return an AdvisoryData build from a ``raw_data`` mapping of OSV advisory and a ``supported_ecosystem`` string. 
""" - advisory_id = raw_data.get("id") or "" + if not advisory_id: + advisory_id = raw_data.get("id") or "" if not advisory_id: logger.error(f"Missing advisory id in OSV data: {raw_data}") return None From 5951dfd91ea28498bd4a3d314698804fbbaaf6d8 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Mar 2026 18:24:02 +0530 Subject: [PATCH 500/545] Change documentation Signed-off-by: Tushar Goel --- PIPELINES-AVID.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PIPELINES-AVID.rst b/PIPELINES-AVID.rst index 43de21e19..3d82400f8 100644 --- a/PIPELINES-AVID.rst +++ b/PIPELINES-AVID.rst @@ -55,7 +55,7 @@ * - project-kb-statements_v2 - Vulnerability ID of the record * - pypa_importer_v2 - - ID of the OSV record + - {package_name}/{ID of the OSV record} * - pysec_importer_v2 - ID of the OSV record * - redhat_importer_v2 From 63f3416e12421da18f979459636b717e8b10dc4f Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 31 Mar 2026 19:18:19 +0530 Subject: [PATCH 501/545] Increase page_size for pagination Signed-off-by: Tushar Goel --- etc/nginx/conf.d/default.conf | 1 + vulnerablecode/settings.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/etc/nginx/conf.d/default.conf b/etc/nginx/conf.d/default.conf index ce8081c06..131882479 100644 --- a/etc/nginx/conf.d/default.conf +++ b/etc/nginx/conf.d/default.conf @@ -12,6 +12,7 @@ server { proxy_redirect off; client_max_body_size 10G; proxy_read_timeout 600s; + proxy_set_header X-Forwarded-Proto $scheme; } location /static/ { diff --git a/vulnerablecode/settings.py b/vulnerablecode/settings.py index 435cb8953..eaf2c1276 100644 --- a/vulnerablecode/settings.py +++ b/vulnerablecode/settings.py @@ -251,7 +251,7 @@ "EXCEPTION_HANDLER": "vulnerabilities.throttling.throttled_exception_handler", "DEFAULT_PAGINATION_CLASS": "vulnerabilities.pagination.SmallResultSetPagination", # Limit the load on the Database returning a small number of records by default. 
https://github.com/nexB/vulnerablecode/issues/819 - "PAGE_SIZE": 10, + "PAGE_SIZE": 100, # for API docs "DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema", "DATETIME_FORMAT": "%Y-%m-%dT%H:%M:%SZ", From e168ba9e671709aef868431dd5347ac9f9e026e2 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 1 Apr 2026 16:35:43 +0530 Subject: [PATCH 502/545] Remove risk score from UI Signed-off-by: Tushar Goel --- vulnerabilities/templates/package_details_v2.html | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/vulnerabilities/templates/package_details_v2.html b/vulnerabilities/templates/package_details_v2.html index 8511348ec..a6c07c352 100644 --- a/vulnerabilities/templates/package_details_v2.html +++ b/vulnerabilities/templates/package_details_v2.html @@ -142,7 +142,6 @@ - @@ -198,13 +197,6 @@ {% endif %} {% endwith %} - {% empty %} @@ -266,13 +258,6 @@ {% endif %} {% endwith %} - {% empty %} From 012c3ac9f0756271bb7bc574c3643dc15519da3c Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 1 Apr 2026 16:39:42 +0530 Subject: [PATCH 503/545] Update API V3 usage Signed-off-by: Tushar Goel --- api_v3_usage.rst | 2 +- vulnerabilities/api_v3.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/api_v3_usage.rst b/api_v3_usage.rst index 26ed9377f..0da3fe1af 100644 --- a/api_v3_usage.rst +++ b/api_v3_usage.rst @@ -83,7 +83,7 @@ Parameters: - ``purls`` — list of package URLs to query - ``details`` — boolean (default: ``false``) -- ``approximate`` — boolean (default: ``false``) +- ``ignore_qualifiers_subpath`` — boolean (default: ``false``) The ``approximate`` flag replaces the previous ``plain_purl`` parameter. When set to ``true``, qualifiers and subpaths in PURLs are ignored. 
diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index a15d5a0cd..ffa5bd941 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -43,13 +43,13 @@ class PackageQuerySerializer(serializers.Serializer): default=list, ) details = serializers.BooleanField(default=False) - approximate = serializers.BooleanField(default=False) + ignore_qualifiers_subpath = serializers.BooleanField(default=False) def validate(self, data): if not data["purls"]: - if data["details"] or data["approximate"]: + if data["details"] or data["ignore_qualifiers_subpath"]: raise serializers.ValidationError( - "details and approximate must be false when purls is empty" + "``details`` and ``ignore_qualifiers_subpath`` must be false when purls is empty" ) return data @@ -428,7 +428,7 @@ def create(self, request, *args, **kwargs): purls = serializer.validated_data["purls"] details = serializer.validated_data["details"] - approximate = serializer.validated_data["approximate"] + ignore_qualifiers_subpath = serializer.validated_data["ignore_qualifiers_subpath"] if not purls: impacted = ImpactedPackageAffecting.objects.filter(package_id=OuterRef("id")) @@ -444,7 +444,7 @@ def create(self, request, *args, **kwargs): plain_purls = None - if approximate: + if ignore_qualifiers_subpath: plain_purls = [ str( PackageURL( @@ -458,7 +458,7 @@ def create(self, request, *args, **kwargs): ] if not details: - if approximate: + if ignore_qualifiers_subpath: query = ( PackageV2.objects.filter(plain_package_url__in=plain_purls) .values_list("plain_package_url", flat=True) @@ -476,7 +476,7 @@ def create(self, request, *args, **kwargs): page = self.paginate_queryset(query) return self.get_paginated_response(page) - if approximate: + if ignore_qualifiers_subpath: query = ( PackageV2.objects.filter(plain_package_url__in=plain_purls) .order_by("plain_package_url") From ae1b71b89f657363ad34454ec37e4aacf911f1c0 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 1 Apr 2026 16:45:21 +0530 
Subject: [PATCH 504/545] Change tests Signed-off-by: Tushar Goel --- api_v3_usage.rst | 38 +++++++++++++++++----------- vulnerabilities/tests/test_api_v3.py | 4 +-- 2 files changed, 25 insertions(+), 17 deletions(-) diff --git a/api_v3_usage.rst b/api_v3_usage.rst index 0da3fe1af..23732703c 100644 --- a/api_v3_usage.rst +++ b/api_v3_usage.rst @@ -85,7 +85,7 @@ Parameters: - ``details`` — boolean (default: ``false``) - ``ignore_qualifiers_subpath`` — boolean (default: ``false``) -The ``approximate`` flag replaces the previous ``plain_purl`` parameter. +The ``ignore_qualifiers_subpath`` flag replaces the previous ``plain_purl`` parameter. When set to ``true``, qualifiers and subpaths in PURLs are ignored. @@ -140,12 +140,16 @@ Example response: "purl": "pkg:npm/atob@2.0.3", "affected_by_vulnerabilities": [ { - "advisory_id": "nodejs_security_wg/npm-403", - "fixed_by_packages": [ - "pkg:npm/atob@2.1.0" - ], - "duplicate_advisory_ids": [] - } + "advisory_id": "GHSA-g5vw-3h65-2q3v", + "aliases": [], + "weighted_severity": null, + "exploitability_score": null, + "risk_score": null, + "summary": "Access control vulnerable to user data", + "fixed_by_packages": [ + "pkg:pypi/accesscontrol@7.2" + ], + }, ], "fixing_vulnerabilities": [], "next_non_vulnerable_version": "2.1.0", @@ -165,7 +169,7 @@ Using Approximate Matching { "purls": ["pkg:npm/atob@2.0.3?foo=bar"], - "approximate": true, + "ignore_qualifiers_subpath": true, "details": true } @@ -181,13 +185,17 @@ Example response: { "purl": "pkg:npm/atob@2.0.3", "affected_by_vulnerabilities": [ - { - "advisory_id": "nodejs_security_wg/npm-403", - "fixed_by_packages": [ - "pkg:npm/atob@2.1.0" - ], - "duplicate_advisory_ids": [] - } + { + "advisory_id": "GHSA-g5vw-3h65-2q3v", + "aliases": [], + "weighted_severity": null, + "exploitability_score": null, + "risk_score": null, + "summary": "Access control vulnerable to user data", + "fixed_by_packages": [ + "pkg:pypi/accesscontrol@7.2" + ], + } ], "fixing_vulnerabilities": [], 
"next_non_vulnerable_version": "2.1.0", diff --git a/vulnerabilities/tests/test_api_v3.py b/vulnerabilities/tests/test_api_v3.py index fa8a08b33..c7201af6a 100644 --- a/vulnerabilities/tests/test_api_v3.py +++ b/vulnerabilities/tests/test_api_v3.py @@ -126,14 +126,14 @@ def test_packages_pagination(self): self.assertIn("results", response.data) self.assertIn("next", response.data) - def test_packages_approximate(self): + def test_packages_ignore_qualifiers_subpath(self): url = reverse("package-v3-list") response = self.client.post( url, data={ "purls": ["pkg:pypi/sample@1.0.0?foo=bar"], - "approximate": True, + "ignore_qualifiers_subpath": True, "details": False, }, format="json", From 0eb2acd49158ce1e2f30ee5bb7a51da394881598 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 1 Apr 2026 16:47:57 +0530 Subject: [PATCH 505/545] Update changelog and prep for release Signed-off-by: Tushar Goel --- CHANGELOG.rst | 6 ++++++ setup.cfg | 2 +- vulnerablecode/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 294004e08..4c3d9efb4 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,12 @@ Release notes ============= +Version v38.0.0 +--------------------- + +- This is a major version; we have changed our V3 API, refer to ``api_v3_usage.rst`` for details. +- We have started grouping together advisories which have aliases or identifiers in common and also affect the same set of packages.
+ Version v37.0.0 --------------------- diff --git a/setup.cfg b/setup.cfg index 7e11ae621..5c8efc7dd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = vulnerablecode -version = 37.0.0 +version = 38.0.0 license = Apache-2.0 AND CC-BY-SA-4.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index 13c70b495..80b725801 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -14,7 +14,7 @@ import git -__version__ = "37.0.0" +__version__ = "38.0.0" PROJECT_DIR = Path(__file__).resolve().parent From 67cf3645dd37f9c69ac6e0fa96232bd2dae4e347 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 1 Apr 2026 23:27:31 +0530 Subject: [PATCH 506/545] Fix tests Signed-off-by: Tushar Goel --- etc/nginx/conf.d/default.conf | 1 + vulnerabilities/pipes/osv_v2.py | 12 +++++++----- vulnerabilities/tests/test_api_v2.py | 8 ++++---- vulnerabilities/tests/test_api_v3.py | 4 ++-- 4 files changed, 14 insertions(+), 11 deletions(-) diff --git a/etc/nginx/conf.d/default.conf b/etc/nginx/conf.d/default.conf index 131882479..754f65b76 100644 --- a/etc/nginx/conf.d/default.conf +++ b/etc/nginx/conf.d/default.conf @@ -13,6 +13,7 @@ server { client_max_body_size 10G; proxy_read_timeout 600s; proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-Host $host; } location /static/ { diff --git a/vulnerabilities/pipes/osv_v2.py b/vulnerabilities/pipes/osv_v2.py index 65b5a5904..0f8a29e78 100644 --- a/vulnerabilities/pipes/osv_v2.py +++ b/vulnerabilities/pipes/osv_v2.py @@ -69,17 +69,19 @@ def parse_advisory_data_v3( Return an AdvisoryData build from a ``raw_data`` mapping of OSV advisory and a ``supported_ecosystem`` string. 
""" - if not advisory_id: - advisory_id = raw_data.get("id") or "" - if not advisory_id: + adv_id = raw_data.get("id") + if not adv_id: logger.error(f"Missing advisory id in OSV data: {raw_data}") return None + aliases = raw_data.get("aliases") or [] + if not advisory_id: + advisory_id = adv_id + else: + aliases.append(adv_id) summary = raw_data.get("summary") or "" details = raw_data.get("details") or "" summary = build_description(summary=summary, description=details) - aliases = raw_data.get("aliases") or [] aliases.extend(raw_data.get("upstream", [])) - date_published = get_published_date(raw_data=raw_data) severities = list(get_severities(raw_data=raw_data, url=advisory_url)) references = get_references_v2(raw_data=raw_data) diff --git a/vulnerabilities/tests/test_api_v2.py b/vulnerabilities/tests/test_api_v2.py index c4abe3b97..be447ab0b 100644 --- a/vulnerabilities/tests/test_api_v2.py +++ b/vulnerabilities/tests/test_api_v2.py @@ -185,8 +185,8 @@ def test_list_vulnerabilities_pagination(self): self.assertIn("previous", response.data) # The 'vulnerabilities' dictionary should contain vulnerabilities up to the page limit self.assertEqual( - len(response.data["results"]["vulnerabilities"]), 10 - ) # Assuming default page size is 10 + len(response.data["results"]["vulnerabilities"]), 14 + ) # Assuming default page size is 100 class PackageV2ViewSetTest(APITestCase): @@ -346,8 +346,8 @@ def test_list_packages_pagination(self): self.assertIn("next", response.data) self.assertIn("previous", response.data) self.assertEqual( - len(response.data["results"]["packages"]), 10 - ) # Assuming default page size is 10 + len(response.data["results"]["packages"]), 14 + ) # Assuming default page size is 100 def test_invalid_vulnerability_filter(self): """ diff --git a/vulnerabilities/tests/test_api_v3.py b/vulnerabilities/tests/test_api_v3.py index c7201af6a..280662f2c 100644 --- a/vulnerabilities/tests/test_api_v3.py +++ b/vulnerabilities/tests/test_api_v3.py @@ -182,7 +182,7 
@@ def test_advisories_post(self): ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len(response.data["results"]), 10) + self.assertEqual(len(response.data["results"]), 100) advisory = response.data["results"][0] self.assertEqual(advisory["advisory_id"], "ghsa_importer/GHSA-12341") @@ -229,5 +229,5 @@ def test_get_all_vulnerable_purls(self): self.assertEqual(response.status_code, status.HTTP_200_OK) results = response.data["results"] - self.assertEqual(len(results), 10) + self.assertEqual(len(results), 100) self.assertIn("next", response.data) From fd5250976e754f7b3f507cb8291c2a21a45e7daa Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 1 Apr 2026 23:41:10 +0530 Subject: [PATCH 507/545] Fix views for ungrouped advisories Signed-off-by: Tushar Goel --- .../templates/package_details_v2.html | 16 +++++++------- vulnerabilities/views.py | 22 +++++++++++++++++-- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/vulnerabilities/templates/package_details_v2.html b/vulnerabilities/templates/package_details_v2.html index a6c07c352..b6aa84009 100644 --- a/vulnerabilities/templates/package_details_v2.html +++ b/vulnerabilities/templates/package_details_v2.html @@ -211,15 +211,15 @@ {% for advisory in affected_by_advisories_v2 %}
    Exploitability (E) + {% if package.risk_score is not None %} {{package.risk_score}} + {% else %} + {{""}} + {% endif %}
    {{ package.is_vulnerable|yesno:"Yes,No" }}{{ package.risk_score }} + {% if package.risk_score is not None %} + {{ package.risk_score }} + {% else %} + {{ "" }} + {% endif %} +
    - {{ advisory.exploitability }} + {% if advisory.exploitability is not None %} + {{ advisory.exploitability }} + {% else %} + {{ "" }} + {% endif %}
    - {{ advisory.weighted_severity }} + {% if advisory.weighted_severity is not None %} + {{ advisory.weighted_severity }} + {% else %} + {{ "" }} + {% endif %}
    - {{ advisory.risk_score }} + {% if advisory.risk_score is not None %} + {{ advisory.risk_score }} + {% else %} + {{ "" }} + {% endif %}
    Advisory Summary Fixed in package versionRisk score
    + {% if advisory.risk_score is not None %} + {{ advisory.risk_score }} + {% else %} + {{ "" }} + {% endif %} +
    + {% if advisory.risk_score is not None %} + {{ advisory.risk_score }} + {% else %} + {{ "" }} + {% endif %} +
    Advisory Summary Fixed in package versionRisk score
    - {% if advisory.risk_score is not None %} - {{ advisory.risk_score }} - {% else %} - {{ "" }} - {% endif %} -
    - {% if advisory.risk_score is not None %} - {{ advisory.risk_score }} - {% else %} - {{ "" }} - {% endif %} -
    - + {{advisory.advisory_id }}
    - {% if advisory.aliases.all|length != 0 %} + {% if advisory.advisory.aliases.all|length != 0 %} Aliases: {% endif %}
    - {% for alias in advisory.aliases.all %} + {% for alias in advisory.advisory.aliases.all %} {% if alias.url %} {{ alias }} @@ -232,10 +232,10 @@
    - {{ advisory.summary|truncatewords:20 }} + {{ advisory.advisory.summary|truncatewords:20 }} - {% with fixed=fixed_package_details|get_item:advisory.avid %} + {% with fixed=fixed_package_details|get_item:advisory.advisory.avid %} {% if fixed %} {% for item in fixed %}
    @@ -336,16 +336,16 @@ {% for advisory in fixing_advisories_v2 %}
    - + {{advisory.advisory_id }}
    - {{ advisory.summary|truncatewords:20 }} + {{ advisory.advisory.summary|truncatewords:20 }} - {% for alias in advisory.aliases.all %} + {% for alias in advisory.advisory.aliases.all %} {% if alias.url %} {{ alias }} diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index f9274a18d..2a3d737a4 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -245,7 +245,15 @@ def get_context_data(self, **kwargs): else: fixed_pkg_details = get_fixed_package_details(package) context["fixed_package_details"] = fixed_pkg_details - context["affected_by_advisories_v2"] = affecting_advisories + affecting_advs = [] + for adv in affecting_advisories: + affecting_advs.append( + { + "advisory_id": adv.advisory_id.split("/")[-1], + "advisory": adv, + } + ) + context["affected_by_advisories_v2"] = affecting_advs context["affected_by_advisories_v2_url"] = None fixing_advisories = list(fixing_advisories_qs_ids[:101]) @@ -257,7 +265,17 @@ def get_context_data(self, **kwargs): context["fixing_advisories_v2"] = [] else: - context["fixing_advisories_v2"] = fixed_by_advisories + fixed_by_advisories = fixed_by_advisories.prefetch_related( + "aliases", + ) + fixed_by_advisories = list(fixed_by_advisories) + fix_advs = [] + for fixed_by_adv in fixed_by_advisories: + fix_advs.append( + {"advisory_id": fixed_by_adv.advisory_id.split("/")[-1], "advisory": fixed_by_adv} + ) + + context["fixing_advisories_v2"] = fix_advs return context From 959709cfe7426eaf81c4508192ad79bb4458c4f7 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Thu, 2 Apr 2026 11:29:19 +0530 Subject: [PATCH 508/545] Minor fixes Signed-off-by: Tushar Goel --- vulnerabilities/improvers/__init__.py | 2 -- .../v2_improvers/group_advisories_for_packages.py | 2 +- vulnerabilities/tests/test_api_v3.py | 9 +++++++++ 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index 3e991d658..d55ecafdb 100644 --- 
a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -20,7 +20,6 @@ from vulnerabilities.pipelines import populate_vulnerability_summary_pipeline from vulnerabilities.pipelines import remove_duplicate_advisories from vulnerabilities.pipelines.v2_improvers import collect_ssvc_trees -from vulnerabilities.pipelines.v2_improvers import compute_advisory_todo as compute_advisory_todo_v2 from vulnerabilities.pipelines.v2_improvers import compute_package_risk as compute_package_risk_v2 from vulnerabilities.pipelines.v2_improvers import ( computer_package_version_rank as compute_version_rank_v2, @@ -70,7 +69,6 @@ enhance_with_metasploit_v2.MetasploitImproverPipeline, compute_package_risk_v2.ComputePackageRiskPipeline, compute_version_rank_v2.ComputeVersionRankPipeline, - compute_advisory_todo_v2.ComputeToDo, unfurl_version_range_v2.UnfurlVersionRangePipeline, compute_advisory_todo.ComputeToDo, collect_ssvc_trees.CollectSSVCPipeline, diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index db49447ff..b34727078 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -19,7 +19,7 @@ class GroupAdvisoriesForPackages(VulnerableCodePipeline): - """Detect and flag packages that do not exist upstream.""" + """Group advisories for packages that have multiple importers""" pipeline_id = "group_advisories_for_packages" diff --git a/vulnerabilities/tests/test_api_v3.py b/vulnerabilities/tests/test_api_v3.py index 280662f2c..36dd7fba1 100644 --- a/vulnerabilities/tests/test_api_v3.py +++ b/vulnerabilities/tests/test_api_v3.py @@ -1,3 +1,12 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# VulnerableCode is a trademark of nexB Inc. 
+# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/vulnerablecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + from django.urls import reverse from packageurl import PackageURL from rest_framework import status From 78ca5283336a94044c70736f8fd26f226b309e11 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Thu, 2 Apr 2026 11:50:42 +0530 Subject: [PATCH 509/545] Compute rank while unfurling Signed-off-by: Tushar Goel --- .../group_advisories_for_packages.py | 4 +- .../v2_improvers/unfurl_version_range.py | 2 + .../templates/package_details_v3.html | 367 ------------------ vulnerabilities/views.py | 109 +----- vulnerablecode/urls.py | 1 - 5 files changed, 8 insertions(+), 475 deletions(-) delete mode 100644 vulnerabilities/templates/package_details_v3.html diff --git a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py index b34727078..ea6fc9185 100644 --- a/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py +++ b/vulnerabilities/pipelines/v2_improvers/group_advisories_for_packages.py @@ -33,7 +33,7 @@ def group_advisories_for_packages(self): def group_advisoris_for_packages(logger=None): for package in PackageV2.objects.filter(type__in=TYPES_WITH_MULTIPLE_IMPORTERS).iterator(): - print(f"Grouping advisories for package {package.purl}") + logger(f"Grouping advisories for package {package.purl}") affecting_advisories = AdvisoryV2.objects.latest_affecting_advisories_for_purl( purl=package.purl ).prefetch_related( @@ -56,5 +56,5 @@ def group_advisoris_for_packages(logger=None): delete_and_save_advisory_set(affected_groups, package, relation="affecting") delete_and_save_advisory_set(fixed_by_groups, package, relation="fixing") except Exception as e: - print(f"Failed rebuilding advisory 
sets for package {package.purl}: {e!r}") + logger(f"Failed rebuilding advisory sets for package {package.purl}: {e!r}") continue diff --git a/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py b/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py index 48d40e439..1d603b88a 100644 --- a/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py +++ b/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py @@ -118,6 +118,8 @@ def bulk_create_with_m2m(purls, impact, relation, logger): affected_packages_v2 = PackageV2.objects.bulk_get_or_create_from_purls(purls=purls) + affected_packages_v2[-1].calculate_version_rank + relations = [ relation(impacted_package=impact, package=package) for package in affected_packages_v2 ] diff --git a/vulnerabilities/templates/package_details_v3.html b/vulnerabilities/templates/package_details_v3.html deleted file mode 100644 index 44ec1c297..000000000 --- a/vulnerabilities/templates/package_details_v3.html +++ /dev/null @@ -1,367 +0,0 @@ -{% extends "base.html" %} -{% load humanize %} -{% load widget_tweaks %} -{% load static %} -{% load url_filters %} -{% load utils %} - -{% block title %} -VulnerableCode Package Details - {{ package.purl }} -{% endblock %} - -{% block content %} -
    - {% include "package_search_box_v2.html"%} -
    - -{% if package %} -
    -
    -
    -
    - Package details: - {{ package.purl }} - -
    -
    - -
    - -
    - -
    -
    -
    - {% if affected_by_advisories_v2|length != 0 or affected_by_advisories_v2_url %} -
    - {% else %} -
    - {% endif %} - - - - - - - {% if package.is_ghost %} - - - - - {% endif %} - -
    - - purl - - - {{ package.purl }} -
    - Tags - - - Ghost - -
    -
    - {% if affected_by_advisories_v2|length != 0 or affected_by_advisories_v2_url %} - -
    - - - - - - - - - - - - - - - -
    - Next non-vulnerable version - - {% if next_non_vulnerable.version %} - {{ next_non_vulnerable.version }} - {% else %} - None. - {% endif %} -
    - Latest non-vulnerable version - - {% if latest_non_vulnerable.version %} - {{ latest_non_vulnerable.version }} - {% else %} - None. - {% endif %} -
    - Risk score - - {{package.risk_score}} -
    -
    - - {% endif %} - -
    - {% if affected_by_advisories_v2|length != 0 %} -
    - Vulnerabilities affecting this package ({{ affected_by_advisories_v2|length }}) -
    - - - - - - - - - - - - - {% for advisory in affected_by_advisories_v2 %} - - - - - - - - {% empty %} - - - - {% endfor %} - -
    AdvisorySourceDate PublishedSummaryFixed in package version
    - - {{advisory.primary_advisory.advisory_id }} - -
    - {% if advisory.identifiers|length != 0 %} - Aliases: - {% endif %} -
    - {% for alias in advisory.identifiers %} - {% if alias.url %} - {{ alias }} -
    - {% else %} - {{ alias }} -
    - {% endif %} - {% endfor %} -
    - {% if advisory.secondary_members|length != 0 %} -

    Supporting advisories are listed below the primary advisory.

    - {% for secondary in advisory.secondary_members %} - - {{secondary.advisory.avid }}
    -
    - {% endfor %} - {% endif %} -
    - {{advisory.primary_advisory.url}} - - {{advisory.primary_advisory.date_published}} - - {{ advisory.primary_advisory.summary }} - - {% with fixed=fixed_package_details|get_item:advisory.primary_advisory.avid %} - {% if fixed %} - {% for item in fixed %} -
    - {{ item.pkg.version }} -
    - {% if item.pkg.is_vulnerable %} - - Vulnerable - - {% else %} - - Not vulnerable - - {% endif %} -
    - {% endfor %} - {% else %} - There are no reported fixed by versions. - {% endif %} - {% endwith %} -
    - This package is not known to be subject of any advisories. -
    - {% elif affected_by_advisories_v2_url %} -
    - This package is subject to more than 100 advisories. Please refer to the following - URL for vulnerabilities affecting this package: Advisories -
    - {% else %} -
    - This package is not known to be subject of any advisories. -
    - {% endif %} -
    - -
    - {% if fixing_advisories_v2|length != 0 %} -
    - Vulnerabilities fixed by this package ({{ fixing_advisories_v2|length }}) -
    - - - - - - - - - - - - - {% for advisory in fixing_advisories_v2 %} - - - - - - - - {% empty %} - - - - {% endfor %} - -
    AdvisorySourceDate PublishedSummaryAliases
    - - {{advisory.primary_advisory.advisory_id }} - -
    - {% if advisory.secondary_members|length != 0 %} -

    Supporting advisories are listed below the primary advisory.

    - {% for secondary in advisory.secondary_members %} - - {{secondary.advisory.avid }}
    -
    - {% endfor %} - {% endif %} -
    - {{advisory.primary_advisory.url}} - - {{advisory.primary_advisory.date_published}} - - {{ advisory.primary_advisory.summary }} - - {% for alias in advisory.identifiers %} - {% if alias.url %} - {{ alias }} -
    - {% else %} - {{ alias }} -
    - {% endif %} - {% endfor %} -
    - This package is not known to fix any advisories. -
    - -
    - {% elif fixing_advisories_v2_url %} -
    - This package is known to fix more than 100 advisories. Please refer to the following - URL for vulnerabilities fixed by this package: Advisories -
    - {% else %} -
    - This package is not known to fix any advisories. -
    - {% endif %} -
    -
    -
    - - -
    -
    -
    -
    - -{% endif %} -{% endblock %} diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 2a3d737a4..4f9f396ea 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -272,7 +272,10 @@ def get_context_data(self, **kwargs): fix_advs = [] for fixed_by_adv in fixed_by_advisories: fix_advs.append( - {"advisory_id": fixed_by_adv.advisory_id.split("/")[-1], "advisory": fixed_by_adv} + { + "advisory_id": fixed_by_adv.advisory_id.split("/")[-1], + "advisory": fixed_by_adv, + } ) context["fixing_advisories_v2"] = fix_advs @@ -399,110 +402,6 @@ def get_object(self, queryset=None): return package -class PackageV3Details(DetailView): - model = models.PackageV2 - template_name = "package_details_v3.html" - slug_url_kwarg = "purl" - slug_field = "purl" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - package = self.object - - next_non_vulnerable, latest_non_vulnerable = package.get_non_vulnerable_versions() - - context["package"] = package - context["next_non_vulnerable"] = next_non_vulnerable - context["latest_non_vulnerable"] = latest_non_vulnerable - context["package_search_form"] = PackageSearchForm(self.request.GET) - - affected_by_advisories_qs = ( - models.AdvisorySet.objects.filter(package=package, relation_type="affecting") - .select_related("primary_advisory") - .prefetch_related( - Prefetch( - "members", - queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - "advisory" - ), - to_attr="secondary_members", - ) - ) - ) - - fixing_advisories_qs = ( - models.AdvisorySet.objects.filter(package=package, relation_type="fixing") - .select_related("primary_advisory") - .prefetch_related( - Prefetch( - "members", - queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - "advisory" - ), - to_attr="secondary_members", - ) - ) - ) - - print(affected_by_advisories_qs) - print(fixing_advisories_qs) - - affected_by_advisories_url = None - fixing_advisories_url = 
None - - affected_by_advisories_qs_ids = affected_by_advisories_qs.only("id") - fixing_advisories_qs_ids = fixing_advisories_qs.only("id") - - # affected_by_advisories = list(affected_by_advisories_qs_ids[:101]) - # if len(affected_by_advisories) > 100: - # affected_by_advisories_url = reverse_lazy( - # "affected_by_advisories_v2", kwargs={"purl": package.package_url} - # ) - # context["affected_by_advisories_v2_url"] = affected_by_advisories_url - # context["affected_by_advisories_v2"] = [] - # context["fixed_package_details"] = {} - - # else: - fixed_pkg_details = get_fixed_package_details(package) - - context["affected_by_advisories_v2"] = affected_by_advisories_qs - context["fixed_package_details"] = fixed_pkg_details - context["affected_by_advisories_v2_url"] = None - - # fixing_advisories = list(fixing_advisories_qs_ids[:101]) - # if len(fixing_advisories) > 100: - # fixing_advisories_url = reverse_lazy( - # "fixing_advisories_v2", kwargs={"purl": package.package_url} - # ) - # context["fixing_advisories_v2_url"] = fixing_advisories_url - # context["fixing_advisories_v2"] = [] - - # else: - context["fixing_advisories_v2"] = fixing_advisories_qs - context["fixing_advisories_v2_url"] = None - - return context - - def get_object(self, queryset=None): - if queryset is None: - queryset = self.get_queryset() - - purl = self.kwargs.get(self.slug_url_kwarg) - if purl: - queryset = queryset.for_purl(purl) - else: - cls = self.__class__.__name__ - raise AttributeError( - f"Package details view {cls} must be called with a purl, " f"but got: {purl!r}" - ) - - try: - package = queryset.get() - except queryset.model.DoesNotExist: - raise Http404(f"No Package found for purl: {purl}") - return package - - def get_fixed_package_details(package): rows = package.affected_in_impacts.values_list( "advisory__avid", diff --git a/vulnerablecode/urls.py b/vulnerablecode/urls.py index 44cacd9b0..eb1bc006b 100644 --- a/vulnerablecode/urls.py +++ b/vulnerablecode/urls.py @@ -41,7 +41,6 
@@ from vulnerabilities.views import PackageSearch from vulnerabilities.views import PackageSearchV2 from vulnerabilities.views import PackageV2Details -from vulnerabilities.views import PackageV3Details from vulnerabilities.views import PipelineRunDetailView from vulnerabilities.views import PipelineRunListView from vulnerabilities.views import PipelineScheduleListView From c341e6b43b43abdfa722d9727ec4ff90211a5e57 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Thu, 2 Apr 2026 11:55:43 +0530 Subject: [PATCH 510/545] Adjust precedence of importers Signed-off-by: Tushar Goel --- vulnerabilities/models.py | 6 +++--- .../pipelines/v2_importers/elixir_security_importer.py | 2 +- vulnerabilities/pipelines/v2_importers/npm_importer.py | 2 +- .../pipelines/v2_importers/retiredotnet_importer.py | 2 +- vulnerabilities/pipelines/v2_importers/ruby_importer.py | 2 +- vulnerabilities/pipes/openssl.py | 4 +++- .../tests/pipelines/v2_importers/test_collect_fix_commit.py | 4 +++- vulnerabilities/tests/test_api.py | 6 +++--- 8 files changed, 16 insertions(+), 12 deletions(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 45d8acf55..90e7b0287 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1139,9 +1139,9 @@ def get_affecting_vulnerabilities(self): next_fixed_package_vulns = list(fixed_by_pkg.affected_by) fixed_by_package_details["fixed_by_purl"] = fixed_by_purl - fixed_by_package_details["fixed_by_purl_vulnerabilities"] = ( - next_fixed_package_vulns - ) + fixed_by_package_details[ + "fixed_by_purl_vulnerabilities" + ] = next_fixed_package_vulns fixed_by_pkgs.append(fixed_by_package_details) vuln_details["fixed_by_package_details"] = fixed_by_pkgs diff --git a/vulnerabilities/pipelines/v2_importers/elixir_security_importer.py b/vulnerabilities/pipelines/v2_importers/elixir_security_importer.py index 3b9f86d8e..2269d0fbc 100644 --- a/vulnerabilities/pipelines/v2_importers/elixir_security_importer.py +++ 
b/vulnerabilities/pipelines/v2_importers/elixir_security_importer.py @@ -37,7 +37,7 @@ class ElixirSecurityImporterPipeline(VulnerableCodeBaseImporterPipelineV2): repo_url = "git+https://github.com/dependabot/elixir-security-advisories" run_once = True - precedence = 200 + precedence = 400 @classmethod def steps(cls): diff --git a/vulnerabilities/pipelines/v2_importers/npm_importer.py b/vulnerabilities/pipelines/v2_importers/npm_importer.py index 32eec2051..9ec4c16dc 100644 --- a/vulnerabilities/pipelines/v2_importers/npm_importer.py +++ b/vulnerabilities/pipelines/v2_importers/npm_importer.py @@ -41,7 +41,7 @@ class NpmImporterPipeline(VulnerableCodeBaseImporterPipelineV2): license_url = "https://github.com/nodejs/security-wg/blob/main/LICENSE.md" repo_url = "git+https://github.com/nodejs/security-wg" - precedence = 200 + precedence = 500 @classmethod def steps(cls): diff --git a/vulnerabilities/pipelines/v2_importers/retiredotnet_importer.py b/vulnerabilities/pipelines/v2_importers/retiredotnet_importer.py index cb87183e3..de9f131ee 100644 --- a/vulnerabilities/pipelines/v2_importers/retiredotnet_importer.py +++ b/vulnerabilities/pipelines/v2_importers/retiredotnet_importer.py @@ -30,7 +30,7 @@ class RetireDotnetImporterPipeline(VulnerableCodeBaseImporterPipelineV2): pipeline_id = "retiredotnet_importer_v2" run_once = True - precedence = 200 + precedence = 400 @classmethod def steps(cls): diff --git a/vulnerabilities/pipelines/v2_importers/ruby_importer.py b/vulnerabilities/pipelines/v2_importers/ruby_importer.py index fad09a1b5..210f73566 100644 --- a/vulnerabilities/pipelines/v2_importers/ruby_importer.py +++ b/vulnerabilities/pipelines/v2_importers/ruby_importer.py @@ -58,7 +58,7 @@ class RubyImporterPipeline(VulnerableCodeBaseImporterPipelineV2): SOFTWARE. 
""" - precedence = 200 + precedence = 500 @classmethod def steps(cls): diff --git a/vulnerabilities/pipes/openssl.py b/vulnerabilities/pipes/openssl.py index 1dffdedc1..b240f416c 100644 --- a/vulnerabilities/pipes/openssl.py +++ b/vulnerabilities/pipes/openssl.py @@ -89,7 +89,9 @@ def get_reference(reference_name, tag, reference_url): ref_type = ( AdvisoryReference.COMMIT if "commit" in name or tag == "patch" - else AdvisoryReference.ADVISORY if "advisory" in name else AdvisoryReference.OTHER + else AdvisoryReference.ADVISORY + if "advisory" in name + else AdvisoryReference.OTHER ) return ReferenceV2( diff --git a/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py b/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py index 9a687a3b7..dac2c7781 100644 --- a/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py +++ b/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py @@ -52,7 +52,9 @@ def test_collect_fix_commits_groups_by_vuln(mock_repo, pipeline): side_effect=lambda c: ( ["CVE-2021-0001"] if "CVE" in c.message - else ["GHSA-dead-beef-baad"] if "GHSA" in c.message else [] + else ["GHSA-dead-beef-baad"] + if "GHSA" in c.message + else [] ) ) diff --git a/vulnerabilities/tests/test_api.py b/vulnerabilities/tests/test_api.py index 31f2b7774..9ed647099 100644 --- a/vulnerabilities/tests/test_api.py +++ b/vulnerabilities/tests/test_api.py @@ -75,9 +75,9 @@ def cleaned_response(response): reference["scores"] = sorted( reference["scores"], key=lambda x: (x["value"], x["scoring_system"]) ) - package_data["resolved_vulnerabilities"][index]["references"][index2]["scores"] = ( - reference["scores"] - ) + package_data["resolved_vulnerabilities"][index]["references"][index2][ + "scores" + ] = reference["scores"] cleaned_response.append(package_data) From 54b0fc9773a6f19017b0129493a5cd7353818e1a Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Thu, 2 Apr 2026 12:01:50 +0530 Subject: [PATCH 511/545] 
Upgrade black Signed-off-by: Tushar Goel --- vulnerabilities/models.py | 6 +++--- vulnerabilities/pipes/openssl.py | 4 +--- .../tests/pipelines/v2_importers/test_collect_fix_commit.py | 4 +--- vulnerabilities/tests/test_api.py | 6 +++--- 4 files changed, 8 insertions(+), 12 deletions(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 90e7b0287..45d8acf55 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1139,9 +1139,9 @@ def get_affecting_vulnerabilities(self): next_fixed_package_vulns = list(fixed_by_pkg.affected_by) fixed_by_package_details["fixed_by_purl"] = fixed_by_purl - fixed_by_package_details[ - "fixed_by_purl_vulnerabilities" - ] = next_fixed_package_vulns + fixed_by_package_details["fixed_by_purl_vulnerabilities"] = ( + next_fixed_package_vulns + ) fixed_by_pkgs.append(fixed_by_package_details) vuln_details["fixed_by_package_details"] = fixed_by_pkgs diff --git a/vulnerabilities/pipes/openssl.py b/vulnerabilities/pipes/openssl.py index b240f416c..1dffdedc1 100644 --- a/vulnerabilities/pipes/openssl.py +++ b/vulnerabilities/pipes/openssl.py @@ -89,9 +89,7 @@ def get_reference(reference_name, tag, reference_url): ref_type = ( AdvisoryReference.COMMIT if "commit" in name or tag == "patch" - else AdvisoryReference.ADVISORY - if "advisory" in name - else AdvisoryReference.OTHER + else AdvisoryReference.ADVISORY if "advisory" in name else AdvisoryReference.OTHER ) return ReferenceV2( diff --git a/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py b/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py index dac2c7781..9a687a3b7 100644 --- a/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py +++ b/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py @@ -52,9 +52,7 @@ def test_collect_fix_commits_groups_by_vuln(mock_repo, pipeline): side_effect=lambda c: ( ["CVE-2021-0001"] if "CVE" in c.message - else ["GHSA-dead-beef-baad"] - if 
"GHSA" in c.message - else [] + else ["GHSA-dead-beef-baad"] if "GHSA" in c.message else [] ) ) diff --git a/vulnerabilities/tests/test_api.py b/vulnerabilities/tests/test_api.py index 9ed647099..31f2b7774 100644 --- a/vulnerabilities/tests/test_api.py +++ b/vulnerabilities/tests/test_api.py @@ -75,9 +75,9 @@ def cleaned_response(response): reference["scores"] = sorted( reference["scores"], key=lambda x: (x["value"], x["scoring_system"]) ) - package_data["resolved_vulnerabilities"][index]["references"][index2][ - "scores" - ] = reference["scores"] + package_data["resolved_vulnerabilities"][index]["references"][index2]["scores"] = ( + reference["scores"] + ) cleaned_response.append(package_data) From 2c3d35401ebb05d97a25cba187e6964d84ebcb43 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Thu, 2 Apr 2026 19:03:01 +0530 Subject: [PATCH 512/545] Prepare v38.1.0 for release Signed-off-by: Tushar Goel --- CHANGELOG.rst | 8 ++ setup.cfg | 2 +- vulnerabilities/api_v3.py | 5 +- vulnerabilities/importers/__init__.py | 4 - vulnerabilities/improvers/__init__.py | 2 - .../v2_improvers/unfurl_version_range.py | 10 ++- vulnerabilities/tests/test_view.py | 32 ++++++++ vulnerabilities/throttling.py | 13 ++++ vulnerabilities/views.py | 76 ++++++++++++++----- vulnerablecode/__init__.py | 2 +- vulnerablecode/settings.py | 2 + 11 files changed, 127 insertions(+), 29 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 4c3d9efb4..f52bf437a 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,14 @@ Release notes ============= +Version v38.1.0 +--------------------- + +- Throttle UI to 15 requests per minute to avoid abuse and improve performance. +- Handle errors in unfurl_version_range pipeline. +- Remove Todo pipeline from v1 pipelines. +- Add openAPI documentation for Package and Advisory viewset. 
+ Version v38.0.0 --------------------- diff --git a/setup.cfg b/setup.cfg index 5c8efc7dd..16dbe9b9a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = vulnerablecode -version = 38.0.0 +version = 38.1.0 license = Apache-2.0 AND CC-BY-SA-4.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index ffa5bd941..c17202f25 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -14,6 +14,7 @@ from django.db.models import OuterRef from django.db.models import Prefetch from django_filters import rest_framework as filters +from drf_spectacular.utils import extend_schema from packageurl import PackageURL from rest_framework import serializers from rest_framework import viewsets @@ -422,6 +423,7 @@ class PackageV3ViewSet(viewsets.GenericViewSet): filter_backends = [filters.DjangoFilterBackend] throttle_classes = [AnonRateThrottle, PermissionBasedUserRateThrottle] + @extend_schema(request=PackageQuerySerializer) def create(self, request, *args, **kwargs): serializer = PackageQuerySerializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -528,8 +530,9 @@ class AdvisoryV3ViewSet(viewsets.GenericViewSet): filter_backends = [filters.DjangoFilterBackend] throttle_classes = [AnonRateThrottle, PermissionBasedUserRateThrottle] + @extend_schema(request=AdvisoryQuerySerializer) def create(self, request, *args, **kwargs): - serializer = PackageQuerySerializer(data=request.data) + serializer = AdvisoryQuerySerializer(data=request.data) serializer.is_valid(raise_exception=True) purls = serializer.validated_data["purls"] diff --git a/vulnerabilities/importers/__init__.py b/vulnerabilities/importers/__init__.py index 594021092..439e69731 100644 --- a/vulnerabilities/importers/__init__.py +++ b/vulnerabilities/importers/__init__.py @@ -21,7 +21,6 @@ from vulnerabilities.importers import github_osv from 
vulnerabilities.importers import istio from vulnerabilities.importers import mozilla -from vulnerabilities.importers import openssl from vulnerabilities.importers import oss_fuzz from vulnerabilities.importers import postgresql from vulnerabilities.importers import project_kb_msr2019 @@ -38,7 +37,6 @@ from vulnerabilities.pipelines import gitlab_importer from vulnerabilities.pipelines import nginx_importer from vulnerabilities.pipelines import npm_importer -from vulnerabilities.pipelines import nvd_importer from vulnerabilities.pipelines import pypa_importer from vulnerabilities.pipelines import pysec_importer from vulnerabilities.pipelines.v2_importers import alpine_linux_importer as alpine_linux_importer_v2 @@ -118,7 +116,6 @@ retiredotnet_importer_v2.RetireDotnetImporterPipeline, ubuntu_osv_importer_v2.UbuntuOSVImporterPipeline, alpine_linux_importer_v2.AlpineLinuxImporterPipeline, - nvd_importer.NVDImporterPipeline, github_importer.GitHubAPIImporterPipeline, gitlab_importer.GitLabImporterPipeline, github_osv.GithubOSVImporter, @@ -136,7 +133,6 @@ alpine_linux_importer.AlpineLinuxImporterPipeline, ruby.RubyImporter, apache_kafka.ApacheKafkaImporter, - openssl.OpensslImporter, openssl_importer_v2.OpenSSLImporterPipeline, redhat.RedhatImporter, archlinux.ArchlinuxImporter, diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index d55ecafdb..11fa5126a 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -10,7 +10,6 @@ from vulnerabilities.improvers import valid_versions from vulnerabilities.improvers import vulnerability_status from vulnerabilities.pipelines import add_cvss31_to_CVEs -from vulnerabilities.pipelines import compute_advisory_todo from vulnerabilities.pipelines import compute_package_risk from vulnerabilities.pipelines import compute_package_version_rank from vulnerabilities.pipelines import enhance_with_exploitdb @@ -70,7 +69,6 @@ 
compute_package_risk_v2.ComputePackageRiskPipeline, compute_version_rank_v2.ComputeVersionRankPipeline, unfurl_version_range_v2.UnfurlVersionRangePipeline, - compute_advisory_todo.ComputeToDo, collect_ssvc_trees.CollectSSVCPipeline, relate_severities.RelateSeveritiesPipeline, group_advisories_for_packages.GroupAdvisoriesForPackages, diff --git a/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py b/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py index 1d603b88a..f18f43fbf 100644 --- a/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py +++ b/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py @@ -52,7 +52,7 @@ def unfurl_version_range(self): if purl.type not in RANGE_CLASS_BY_SCHEMES: continue - versions = get_purl_versions(purl, cached_versions) + versions = get_purl_versions(purl, cached_versions) or [] affected_purls = get_affected_purls( versions=versions, affecting_vers=impact.affecting_vers, @@ -79,6 +79,8 @@ def get_affected_purls(versions, affecting_vers, base_purl, logger): version_class = affecting_version_range.version_class try: + if not versions: + return [] versions = [version_class(v) for v in versions] except Exception as e: logger( @@ -107,8 +109,10 @@ def get_affected_purls(versions, affecting_vers, base_purl, logger): def get_purl_versions(purl, cached_versions): if not purl in cached_versions: - cached_versions[purl] = get_versions(purl) - return cached_versions[purl] + purls = get_versions(purl) + if purls is not None: + cached_versions[purl] = purls + return cached_versions.get(purl) or [] def bulk_create_with_m2m(purls, impact, relation, logger): diff --git a/vulnerabilities/tests/test_view.py b/vulnerabilities/tests/test_view.py index 471e0bf43..3111ef738 100644 --- a/vulnerabilities/tests/test_view.py +++ b/vulnerabilities/tests/test_view.py @@ -11,8 +11,10 @@ import time import pytest +from django.core.cache import cache from django.test import Client from django.test import TestCase +from 
django.urls import reverse from packageurl import PackageURL from univers import versions @@ -330,3 +332,33 @@ def test_aggregate_fixed_and_affected_packages(self): end_time = time.time() assert end_time - start_time < 0.05 self.assertEqual(response.status_code, 200) + + +class ThrottleTestCase(TestCase): + def setUp(self): + self.client = Client() + cache.clear() + + def test_throttle_after_15_requests(self): + url = reverse("home") + + responses = [] + + for i in range(16): + response = self.client.get( + url, + HTTP_USER_AGENT="test-agent", + ) + responses.append(response.status_code) + + assert all(code == 200 for code in responses[:15]) + + assert responses[15] == 429 + + url = reverse("package_search") + + response = self.client.get( + url, + HTTP_USER_AGENT="test-agent", + ) + assert response.status_code == 429 diff --git a/vulnerabilities/throttling.py b/vulnerabilities/throttling.py index e14c1a1c0..c97b2c89f 100644 --- a/vulnerabilities/throttling.py +++ b/vulnerabilities/throttling.py @@ -51,6 +51,19 @@ def get_throttle_rate(self, tier): raise ImproperlyConfigured(msg) +class AnonUserUIThrottle(UserRateThrottle): + scope = "ui" + + def allow_request(self, request, view): + self.rate = self.THROTTLE_RATES.get("ui") + self.num_requests, self.duration = self.parse_rate(self.rate) + return super().allow_request(request, view) + + def get_cache_key(self, request, view): + ident = self.get_ident(request) + return f"throttle_ui_{ident}" + + def throttled_exception_handler(exception, context): """ Return this response whenever a request has been throttled diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 4f9f396ea..b984fbb51 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -20,6 +20,7 @@ from django.db.models import Exists from django.db.models import OuterRef from django.db.models import Prefetch +from django.http import HttpResponse from django.http.response import Http404 from django.shortcuts import 
get_object_or_404 from django.shortcuts import redirect @@ -47,6 +48,7 @@ from vulnerabilities.pipelines.v2_importers.epss_importer_v2 import EPSSImporterPipeline from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS +from vulnerabilities.throttling import AnonUserUIThrottle from vulnerabilities.utils import TYPES_WITH_MULTIPLE_IMPORTERS from vulnerabilities.utils import get_advisories_from_groups from vulnerabilities.utils import merge_and_save_grouped_advisories @@ -56,7 +58,47 @@ PAGE_SIZE = 10 -class PackageSearch(ListView): +class VulnerableCodeView(View): + """ + Base ListView for VulnerableCode views that includes throttling. + """ + + throttle_classes = [AnonUserUIThrottle] + + def dispatch(self, request, *args, **kwargs): + throttle = AnonUserUIThrottle() + + if not throttle.allow_request(request, self): + return HttpResponse("Rate limit exceeded", status=429) + + return super().dispatch(request, *args, **kwargs) + + +class VulnerableCodeDetailView(DetailView, VulnerableCodeView): + """ + Base DetailView for VulnerableCode views that includes throttling. + """ + + pass + + +class VulnerableCodeListView(ListView, VulnerableCodeView): + """ + Base ListView for VulnerableCode views that includes throttling. + """ + + pass + + +class VulnerableCodeCreateView(generic.CreateView, VulnerableCodeView): + """ + Base CreateView for VulnerableCode views that includes throttling. 
+ """ + + pass + + +class PackageSearch(VulnerableCodeListView): model = models.Package template_name = "packages.html" ordering = ["type", "namespace", "name", "version"] @@ -84,7 +126,7 @@ def get_queryset(self, query=None): ) -class VulnerabilitySearch(ListView): +class VulnerabilitySearch(VulnerableCodeListView): model = models.Vulnerability template_name = "vulnerabilities.html" ordering = ["vulnerability_id"] @@ -102,7 +144,7 @@ def get_queryset(self, query=None): return self.model.objects.search(query=query).with_package_counts() -class PackageDetails(DetailView): +class PackageDetails(VulnerableCodeDetailView): model = models.Package template_name = "package_details.html" slug_url_kwarg = "purl" @@ -143,7 +185,7 @@ def get_object(self, queryset=None): return package -class PackageSearchV2(ListView): +class PackageSearchV2(VulnerableCodeListView): model = models.PackageV2 template_name = "packages_v2.html" ordering = ["type", "namespace", "name", "version"] @@ -166,7 +208,7 @@ def get_queryset(self, query=None): return self.model.objects.search(query).prefetch_related().with_is_vulnerable() -class AffectedByAdvisoriesListView(ListView): +class AffectedByAdvisoriesListView(VulnerableCodeListView): model = models.AdvisoryV2 template_name = "affected_by_advisories.html" paginate_by = PAGE_SIZE @@ -187,7 +229,7 @@ def get_queryset(self): ) -class FixingAdvisoriesListView(ListView): +class FixingAdvisoriesListView(VulnerableCodeListView): model = models.AdvisoryV2 template_name = "fixing_advisories.html" paginate_by = PAGE_SIZE @@ -201,7 +243,7 @@ def get_queryset(self): ) -class PackageV2Details(DetailView): +class PackageV2Details(VulnerableCodeDetailView): model = models.PackageV2 template_name = "package_details_v2.html" slug_url_kwarg = "purl" @@ -439,7 +481,7 @@ def get_fixed_package_details(package): return fixed_pkg_details -class VulnerabilityDetails(DetailView): +class VulnerabilityDetails(VulnerableCodeDetailView): model = models.Vulnerability 
template_name = "vulnerability_details.html" slug_url_kwarg = "vulnerability_id" @@ -543,7 +585,7 @@ def get_context_data(self, **kwargs): return context -class AdvisoryDetails(DetailView): +class AdvisoryDetails(VulnerableCodeDetailView): model = models.AdvisoryV2 template_name = "advisory_detail.html" slug_url_kwarg = "avid" @@ -717,7 +759,7 @@ def add_ssvc(ssvc): return context -class HomePage(View): +class HomePage(VulnerableCodeView): template_name = "index.html" def get(self, request): @@ -730,7 +772,7 @@ def get(self, request): return render(request=request, template_name=self.template_name, context=context) -class HomePageV2(View): +class HomePageV2(VulnerableCodeView): template_name = "index_v2.html" def get(self, request): @@ -770,7 +812,7 @@ def get(self, request): """ -class ApiUserCreateView(generic.CreateView): +class ApiUserCreateView(VulnerableCodeCreateView): model = models.ApiUser form_class = ApiUserCreationForm template_name = "api_user_creation_form.html" @@ -800,7 +842,7 @@ def get_success_url(self): return reverse_lazy("api_user_request") -class VulnerabilityPackagesDetails(DetailView): +class VulnerabilityPackagesDetails(VulnerableCodeDetailView): """ View to display all packages affected by or fixing a specific vulnerability. URL: /vulnerabilities/{vulnerability_id}/packages @@ -851,7 +893,7 @@ def get_context_data(self, **kwargs): return context -class AdvisoryPackagesDetails(DetailView): +class AdvisoryPackagesDetails(VulnerableCodeDetailView): """ View to display all packages affected by or fixing a specific vulnerability. 
URL: /advisories/{id}/packages @@ -902,7 +944,7 @@ def get_queryset(self): ) -class PipelineScheduleListView(ListView, FormMixin): +class PipelineScheduleListView(VulnerableCodeListView, FormMixin): model = PipelineSchedule context_object_name = "schedule_list" template_name = "pipeline_dashboard.html" @@ -926,7 +968,7 @@ def get_context_data(self, **kwargs): return context -class PipelineRunListView(ListView): +class PipelineRunListView(VulnerableCodeListView): model = PipelineRun context_object_name = "run_list" template_name = "pipeline_run_list.html" @@ -952,7 +994,7 @@ def get_context_data(self, **kwargs): return context -class PipelineRunDetailView(DetailView): +class PipelineRunDetailView(VulnerableCodeDetailView): model = PipelineRun template_name = "pipeline_run_details.html" context_object_name = "run" diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index 80b725801..4966e4c04 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -14,7 +14,7 @@ import git -__version__ = "38.0.0" +__version__ = "38.1.0" PROJECT_DIR = Path(__file__).resolve().parent diff --git a/vulnerablecode/settings.py b/vulnerablecode/settings.py index eaf2c1276..4c480cbc8 100644 --- a/vulnerablecode/settings.py +++ b/vulnerablecode/settings.py @@ -192,12 +192,14 @@ LOGOUT_REDIRECT_URL = "/" THROTTLE_RATE_ANON = env.str("THROTTLE_RATE_ANON", default="3600/hour") +THROTTLE_RATE_UI = env.str("THROTTLE_RATE_UI", default="15/minute") THROTTLE_RATE_USER_HIGH = env.str("THROTTLE_RATE_USER_HIGH", default="18000/hour") THROTTLE_RATE_USER_MEDIUM = env.str("THROTTLE_RATE_USER_MEDIUM", default="14400/hour") THROTTLE_RATE_USER_LOW = env.str("THROTTLE_RATE_USER_LOW", default="10800/hour") REST_FRAMEWORK_DEFAULT_THROTTLE_RATES = { "anon": THROTTLE_RATE_ANON, + "ui": THROTTLE_RATE_UI, "low": THROTTLE_RATE_USER_LOW, "medium": THROTTLE_RATE_USER_MEDIUM, "high": THROTTLE_RATE_USER_HIGH, From c4f3af6784ae619c95e992892eb12b5e12e7c3bd Mon Sep 17 00:00:00 2001 From: 
Keshav Priyadarshi Date: Sat, 4 Apr 2026 00:36:26 +0530 Subject: [PATCH 513/545] style: format code Signed-off-by: Keshav Priyadarshi --- vulnerabilities/models.py | 6 +++--- vulnerabilities/pipes/openssl.py | 4 +++- .../tests/pipelines/v2_importers/test_collect_fix_commit.py | 4 +++- vulnerabilities/tests/test_api.py | 6 +++--- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 45d8acf55..90e7b0287 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1139,9 +1139,9 @@ def get_affecting_vulnerabilities(self): next_fixed_package_vulns = list(fixed_by_pkg.affected_by) fixed_by_package_details["fixed_by_purl"] = fixed_by_purl - fixed_by_package_details["fixed_by_purl_vulnerabilities"] = ( - next_fixed_package_vulns - ) + fixed_by_package_details[ + "fixed_by_purl_vulnerabilities" + ] = next_fixed_package_vulns fixed_by_pkgs.append(fixed_by_package_details) vuln_details["fixed_by_package_details"] = fixed_by_pkgs diff --git a/vulnerabilities/pipes/openssl.py b/vulnerabilities/pipes/openssl.py index 1dffdedc1..b240f416c 100644 --- a/vulnerabilities/pipes/openssl.py +++ b/vulnerabilities/pipes/openssl.py @@ -89,7 +89,9 @@ def get_reference(reference_name, tag, reference_url): ref_type = ( AdvisoryReference.COMMIT if "commit" in name or tag == "patch" - else AdvisoryReference.ADVISORY if "advisory" in name else AdvisoryReference.OTHER + else AdvisoryReference.ADVISORY + if "advisory" in name + else AdvisoryReference.OTHER ) return ReferenceV2( diff --git a/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py b/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py index 9a687a3b7..dac2c7781 100644 --- a/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py +++ b/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py @@ -52,7 +52,9 @@ def test_collect_fix_commits_groups_by_vuln(mock_repo, pipeline): 
side_effect=lambda c: ( ["CVE-2021-0001"] if "CVE" in c.message - else ["GHSA-dead-beef-baad"] if "GHSA" in c.message else [] + else ["GHSA-dead-beef-baad"] + if "GHSA" in c.message + else [] ) ) diff --git a/vulnerabilities/tests/test_api.py b/vulnerabilities/tests/test_api.py index 31f2b7774..9ed647099 100644 --- a/vulnerabilities/tests/test_api.py +++ b/vulnerabilities/tests/test_api.py @@ -75,9 +75,9 @@ def cleaned_response(response): reference["scores"] = sorted( reference["scores"], key=lambda x: (x["value"], x["scoring_system"]) ) - package_data["resolved_vulnerabilities"][index]["references"][index2]["scores"] = ( - reference["scores"] - ) + package_data["resolved_vulnerabilities"][index]["references"][index2][ + "scores" + ] = reference["scores"] cleaned_response.append(package_data) From 1c67f3ef193b96490b234dad513dbe807c14f4dd Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 7 Apr 2026 01:07:25 +0530 Subject: [PATCH 514/545] feat: track pipeline priority in model Signed-off-by: Keshav Priyadarshi --- vulnerabilities/models.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 90e7b0287..0837d2500 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -2262,6 +2262,10 @@ def requeue(self): class PipelineSchedule(models.Model): """The Database representation of a pipeline schedule.""" + class ExecutionPriority(models.IntegerChoices): + HIGH = 1, "high" + DEFAULT = 2, "default" + pipeline_id = models.CharField( max_length=600, help_text=("Identify a registered Pipeline class."), @@ -2306,6 +2310,14 @@ class PipelineSchedule(models.Model): help_text=("Number of hours to wait between run of this pipeline."), ) + run_priority = models.IntegerField( + null=False, + blank=False, + choices=ExecutionPriority.choices, + default=ExecutionPriority.DEFAULT, + help_text=("Select the pipeline execution priority"), + ) + schedule_work_id = models.CharField( 
max_length=255, unique=True, From c1c775951e97a2c59f71b09feea45a07df1020e2 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 7 Apr 2026 01:14:00 +0530 Subject: [PATCH 515/545] feat: enque pipelines based on priority Signed-off-by: Keshav Priyadarshi --- vulnerabilities/schedules.py | 13 ++++++++++++- vulnerabilities/tasks.py | 16 +++++++++++++--- 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/vulnerabilities/schedules.py b/vulnerabilities/schedules.py index e6443e5ab..215129e10 100644 --- a/vulnerabilities/schedules.py +++ b/vulnerabilities/schedules.py @@ -95,10 +95,21 @@ def update_pipeline_schedule(): PipelineSchedule.objects.exclude(pipeline_id__in=pipelines.keys()).delete() for id, pipeline_class in pipelines.items(): run_once = getattr(pipeline_class, "run_once", False) + run_interval = getattr(pipeline_class, "run_interval", 24) + run_priority = getattr( + pipeline_class, "run_priority", PipelineSchedule.ExecutionPriority.DEFAULT + ) - PipelineSchedule.objects.get_or_create( + pipeline, created = PipelineSchedule.objects.get_or_create( pipeline_id=id, defaults={ "is_run_once": run_once, + "run_interval": run_interval, + "run_priority": run_priority, }, ) + + if not created: + pipeline.run_priority = run_priority + pipeline.run_interval = run_interval + pipeline.save() diff --git a/vulnerabilities/tasks.py b/vulnerabilities/tasks.py index 2e7ac2b10..5a5968831 100644 --- a/vulnerabilities/tasks.py +++ b/vulnerabilities/tasks.py @@ -20,7 +20,13 @@ logger = logging.getLogger(__name__) -queue = django_rq.get_queue("default") +default_queue = django_rq.get_queue("default") +high_queue = django_rq.get_queue("high") + +queues = { + "default": django_rq.get_queue("default"), + "high": django_rq.get_queue("high"), +} def execute_pipeline(pipeline_id, run_id): @@ -112,6 +118,8 @@ def set_run_failure(job, connection, type, value, traceback): def enqueue_pipeline(pipeline_id): pipeline_schedule = 
models.PipelineSchedule.objects.get(pipeline_id=pipeline_id) + queue = queues.get(pipeline_schedule.get_priority_display()) + if pipeline_schedule.status in [ models.PipelineRun.Status.RUNNING, models.PipelineRun.Status.QUEUED, @@ -139,5 +147,7 @@ def enqueue_pipeline(pipeline_id): def dequeue_job(job_id): """Remove a job from queue if it hasn't been executed yet.""" - if job_id in queue.jobs: - queue.remove(job_id) + + for queue in queues.values(): + if job_id in queue.jobs: + queue.remove(job_id) From bfcc0c88b96d3f4b67ecefefa564618ce0b16ddc Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 7 Apr 2026 01:14:43 +0530 Subject: [PATCH 516/545] feat: add high priority rq worker Signed-off-by: Keshav Priyadarshi --- docker-compose.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index 76b645560..45ee678cf 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -57,6 +57,17 @@ services: - db - vulnerablecode + vulnerablecode_rqworker_high: + build: . 
+ command: wait-for-it web:8000 -- python ./manage.py rqworker high + env_file: + - docker.env + volumes: + - /etc/vulnerablecode/:/etc/vulnerablecode/ + depends_on: + - vulnerablecode_redis + - db + - vulnerablecode nginx: image: nginx From 974bbb506f517735c170104d27d8b84f296b199a Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 7 Apr 2026 18:02:09 +0530 Subject: [PATCH 517/545] feat: add config for high priority redis queue Signed-off-by: Keshav Priyadarshi --- vulnerablecode/settings.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/vulnerablecode/settings.py b/vulnerablecode/settings.py index 4c480cbc8..8ec5f6e31 100644 --- a/vulnerablecode/settings.py +++ b/vulnerablecode/settings.py @@ -392,7 +392,13 @@ "PORT": env.str("VULNERABLECODE_REDIS_PORT", default="6379"), "PASSWORD": env.str("VULNERABLECODE_REDIS_PASSWORD", default=""), "DEFAULT_TIMEOUT": env.int("VULNERABLECODE_REDIS_DEFAULT_TIMEOUT", default=3600), - } + }, + "high": { + "HOST": env.str("VULNERABLECODE_REDIS_HOST", default="localhost"), + "PORT": env.str("VULNERABLECODE_REDIS_PORT", default="6379"), + "PASSWORD": env.str("VULNERABLECODE_REDIS_PASSWORD", default=""), + "DEFAULT_TIMEOUT": env.int("VULNERABLECODE_REDIS_DEFAULT_TIMEOUT", default=3600), + }, } From d420b97fbe1898985410c6cdd8d05b6087a62ea9 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 7 Apr 2026 18:21:54 +0530 Subject: [PATCH 518/545] refactor: process never unfurled version ranges first - re-unfurl version ranges every 2 days - run unfurl pipeline every 2 hours Signed-off-by: Keshav Priyadarshi --- ...edpackage_last_range_unfurl_at_and_more.py | 32 +++++++++ vulnerabilities/models.py | 7 ++ vulnerabilities/pipelines/__init__.py | 10 +++ .../v2_improvers/unfurl_version_range.py | 72 +++++++++++++++---- 4 files changed, 107 insertions(+), 14 deletions(-) create mode 100644 vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py diff --git 
a/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py b/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py new file mode 100644 index 000000000..f6b5d8a93 --- /dev/null +++ b/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py @@ -0,0 +1,32 @@ +# Generated by Django 5.2.11 on 2026-04-06 20:51 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0119_remove_advisoryset_identifiers_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="impactedpackage", + name="last_range_unfurl_at", + field=models.DateTimeField( + blank=True, + db_index=True, + help_text="Timestamp of the last vers range unfurl.", + null=True, + ), + ), + migrations.AddField( + model_name="pipelineschedule", + name="run_priority", + field=models.IntegerField( + choices=[(1, "high"), (2, "default")], + default=2, + help_text="Select the pipeline execution priority", + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 0837d2500..bb4390f2a 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -3252,6 +3252,13 @@ class ImpactedPackage(models.Model): help_text="Timestamp indicating when this impact was added.", ) + last_range_unfurl_at = models.DateTimeField( + blank=True, + null=True, + db_index=True, + help_text="Timestamp of the last vers range unfurl.", + ) + def to_dict(self): from vulnerabilities.utils import purl_to_dict diff --git a/vulnerabilities/pipelines/__init__.py b/vulnerabilities/pipelines/__init__.py index 632fd95f6..499f53331 100644 --- a/vulnerabilities/pipelines/__init__.py +++ b/vulnerabilities/pipelines/__init__.py @@ -24,6 +24,7 @@ from vulnerabilities.improver import MAX_CONFIDENCE from vulnerabilities.models import Advisory from vulnerabilities.models import PipelineRun +from vulnerabilities.models import PipelineSchedule from 
vulnerabilities.pipes.advisory import import_advisory from vulnerabilities.pipes.advisory import insert_advisory from vulnerabilities.pipes.advisory import insert_advisory_v2 @@ -144,6 +145,9 @@ class VulnerableCodePipeline(PipelineDefinition, BasePipelineRun): # When set to true pipeline is run only once. # To rerun onetime pipeline reset is_active field to True via migration. run_once = False + # Interval between runs in hour. + run_interval = 24 + run_priority = PipelineSchedule.ExecutionPriority.DEFAULT def on_failure(self): """ @@ -176,6 +180,9 @@ class VulnerableCodeBaseImporterPipeline(VulnerableCodePipeline): # When set to true pipeline is run only once. # To rerun onetime pipeline reset is_active field to True via migration. run_once = False + # Interval between runs in hour. + run_interval = 24 + run_priority = PipelineSchedule.ExecutionPriority.DEFAULT @classmethod def steps(cls): @@ -277,6 +284,9 @@ class VulnerableCodeBaseImporterPipelineV2(VulnerableCodePipeline): # When set to true pipeline is run only once. # To rerun onetime pipeline reset is_active field to True via migration. run_once = False + # Interval between runs in hour. 
+ run_interval = 24 + run_priority = PipelineSchedule.ExecutionPriority.DEFAULT @classmethod def steps(cls): diff --git a/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py b/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py index f18f43fbf..cb65f5f93 100644 --- a/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py +++ b/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py @@ -8,9 +8,13 @@ # import logging +from datetime import timedelta from traceback import format_exc as traceback_format_exc from aboutcode.pipeline import LoopProgress +from django.db.models import F +from django.db.models import Q +from django.utils import timezone from fetchcode.package_versions import SUPPORTED_ECOSYSTEMS as FETCHCODE_SUPPORTED_ECOSYSTEMS from packageurl import PackageURL from univers.version_range import RANGE_CLASS_BY_SCHEMES @@ -19,29 +23,45 @@ from vulnerabilities.models import ImpactedPackage from vulnerabilities.models import ImpactedPackageAffecting from vulnerabilities.models import PackageV2 +from vulnerabilities.models import PipelineSchedule from vulnerabilities.pipelines import VulnerableCodePipeline from vulnerabilities.pipes.fetchcode_utils import get_versions from vulnerabilities.utils import update_purl_version class UnfurlVersionRangePipeline(VulnerableCodePipeline): + """ + Unfurl affected version ranges by first processing those that have + never been unfurled and then handling ranges that were last unfurled + two or more days ago. 
+ """ pipeline_id = "unfurl_version_range_v2" + run_interval = 2 + run_priority = PipelineSchedule.ExecutionPriority.HIGH + + # Days elapsed before version range is re-unfurled + reunfurl_after_days = 2 + @classmethod def steps(cls): return (cls.unfurl_version_range,) def unfurl_version_range(self): - impacted_packages = ImpactedPackage.objects.all().order_by("-created_at") - impacted_packages_count = impacted_packages.count() - processed_impacted_packages_count = 0 processed_affected_packages_count = 0 cached_versions = {} + impacts_to_update = [] + update_batch_size = 5 + + impacted_packages = impacted_package_qs(cutoff_day=self.reunfurl_after_days) + impacted_packages_count = impacted_packages.count() self.log(f"Unfurl affected vers range for {impacted_packages_count:,d} ImpactedPackage.") + progress = LoopProgress(total_iterations=impacted_packages_count, logger=self.log) - for impact in progress.iter(impacted_packages): + for impact in progress.iter(impacted_packages.iterator(chunk_size=5000)): + impacts_to_update.append(impact.pk) purl = PackageURL.from_string(impact.base_purl) if not impact.affecting_vers or not any( c in impact.affecting_vers for c in ("<", ">", "!") @@ -52,11 +72,10 @@ def unfurl_version_range(self): if purl.type not in RANGE_CLASS_BY_SCHEMES: continue - versions = get_purl_versions(purl, cached_versions) or [] + versions = get_purl_versions(purl, cached_versions, self.log) or [] affected_purls = get_affected_purls( versions=versions, - affecting_vers=impact.affecting_vers, - base_purl=purl, + impact=impact, logger=self.log, ) if not affected_purls: @@ -70,12 +89,21 @@ def unfurl_version_range(self): ) processed_impacted_packages_count += 1 + if len(impacts_to_update) > update_batch_size: + ImpactedPackage.objects.filter(pk__in=impacts_to_update).update( + last_range_unfurl_at=timezone.now() + ) + impacts_to_update.clear() + + ImpactedPackage.objects.filter(pk__in=impacts_to_update).update( + last_range_unfurl_at=timezone.now() + ) 
self.log(f"Successfully processed {processed_impacted_packages_count:,d} ImpactedPackage.") self.log(f"{processed_affected_packages_count:,d} new Impact-Package relation created.") -def get_affected_purls(versions, affecting_vers, base_purl, logger): - affecting_version_range = VersionRange.from_string(affecting_vers) +def get_affected_purls(versions, impact, logger): + affecting_version_range = VersionRange.from_string(impact.affecting_vers) version_class = affecting_version_range.version_class try: @@ -84,7 +112,7 @@ def get_affected_purls(versions, affecting_vers, base_purl, logger): versions = [version_class(v) for v in versions] except Exception as e: logger( - f"Error while parsing versions for {base_purl!s}: {e!r} \n {traceback_format_exc()}", + f"Error while parsing versions for {impact.base_purl!s}: {e!r} \n {traceback_format_exc()}", level=logging.ERROR, ) return @@ -95,21 +123,24 @@ def get_affected_purls(versions, affecting_vers, base_purl, logger): if version in affecting_version_range: affected_purls.append( update_purl_version( - purl=base_purl, + purl=impact.base_purl, version=str(version), ) ) except Exception as e: logger( - f"Error while checking {version!s} in {affecting_version_range!s}: {e!r} \n {traceback_format_exc()}", + ( + f"Error while checking {version!s} in {affecting_version_range!s} for " + f"advisory {impact.advisory.avid}: {e!r} \n {traceback_format_exc()}" + ), level=logging.ERROR, ) return affected_purls -def get_purl_versions(purl, cached_versions): +def get_purl_versions(purl, cached_versions, logger): if not purl in cached_versions: - purls = get_versions(purl) + purls = get_versions(purl, logger) if purls is not None: cached_versions[purl] = purls return cached_versions.get(purl) or [] @@ -135,3 +166,16 @@ def bulk_create_with_m2m(purls, impact, relation, logger): return 0 return len(relations) + + +def impacted_package_qs(cutoff_day=2): + cutoff = timezone.now() - timedelta(days=cutoff_day) + return ( + 
ImpactedPackage.objects.filter( + (Q(last_range_unfurl_at__isnull=True) | Q(last_range_unfurl_at__lte=cutoff)) + & Q(affecting_vers__isnull=False) + & ~Q(affecting_vers="") + ) + .order_by(F("last_range_unfurl_at").asc(nulls_first=True)) + .only("pk", "affecting_vers", "advisory", "base_purl") + ) From 5c8f7db29e182e1d05b5b76d188d925e98bf1bef Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 7 Apr 2026 18:38:46 +0530 Subject: [PATCH 519/545] test: test order of vers range processing Signed-off-by: Keshav Priyadarshi --- .../v2_improvers/test_unfurl_version_range.py | 102 +++++++++++++++++- 1 file changed, 97 insertions(+), 5 deletions(-) diff --git a/vulnerabilities/tests/pipelines/v2_improvers/test_unfurl_version_range.py b/vulnerabilities/tests/pipelines/v2_improvers/test_unfurl_version_range.py index a1927a426..ae23930f6 100644 --- a/vulnerabilities/tests/pipelines/v2_improvers/test_unfurl_version_range.py +++ b/vulnerabilities/tests/pipelines/v2_improvers/test_unfurl_version_range.py @@ -13,14 +13,18 @@ from unittest.mock import patch from django.test import TestCase +from django.utils import timezone from packageurl import PackageURL from univers.version_range import VersionRange from vulnerabilities.importer import AdvisoryDataV2 from vulnerabilities.importer import AffectedPackageV2 +from vulnerabilities.importer import PackageCommitPatchData from vulnerabilities.models import AdvisoryV2 +from vulnerabilities.models import ImpactedPackage from vulnerabilities.models import PackageV2 from vulnerabilities.pipelines.v2_improvers.unfurl_version_range import UnfurlVersionRangePipeline +from vulnerabilities.pipelines.v2_improvers.unfurl_version_range import impacted_package_qs from vulnerabilities.pipes.advisory import insert_advisory_v2 from vulnerabilities.tests.pipelines import TestLogger @@ -28,7 +32,7 @@ class TestUnfurlVersionRangePipeline(TestCase): def setUp(self): self.logger = TestLogger() - advisory1 = AdvisoryDataV2( + self.advisory1 = 
AdvisoryDataV2( summary="Test advisory", aliases=["CVE-2025-0001"], references=[], @@ -48,14 +52,54 @@ def setUp(self): date_published=datetime.now() - timedelta(days=10), url="https://example.com/advisory", ) - insert_advisory_v2( - advisory=advisory1, - pipeline_id="test_pipeline_v2", - logger=self.logger.write, + + self.advisory2 = AdvisoryDataV2( + summary="Test advisory", + aliases=["CVE-2025-0001"], + references=[], + severities=[], + weaknesses=[], + affected_packages=[ + AffectedPackageV2( + package=PackageURL.from_string("pkg:npm/foobar"), + affected_version_range=VersionRange.from_string("vers:npm/>3.2.1|<4.0.0"), + fixed_version_range=VersionRange.from_string("vers:npm/4.0.0"), + introduced_by_commit_patches=[], + fixed_by_commit_patches=[], + ), + AffectedPackageV2( + package=PackageURL.from_string("pkg:npm/foobar"), + affected_version_range=VersionRange.from_string("vers:npm/>4.2.1|<5.0.0"), + fixed_version_range=VersionRange.from_string("vers:npm/5.0.0"), + introduced_by_commit_patches=[], + fixed_by_commit_patches=[], + ), + AffectedPackageV2( + package=PackageURL.from_string("pkg:npm/foobar"), + affected_version_range=None, + fixed_version_range=None, + introduced_by_commit_patches=[], + fixed_by_commit_patches=[ + PackageCommitPatchData( + vcs_url="https://foobar.vcs/", + commit_hash="982f801f", + ), + ], + ), + ], + patches=[], + advisory_id="GHSA-1234", + date_published=datetime.now() - timedelta(days=10), + url="https://example.com/advisory", ) @patch("vulnerabilities.pipelines.v2_improvers.unfurl_version_range.get_purl_versions") def test_affecting_version_range_unfurl(self, mock_fetch): + insert_advisory_v2( + advisory=self.advisory1, + pipeline_id="test_pipeline_v2", + logger=self.logger.write, + ) self.assertEqual(1, PackageV2.objects.count()) mock_fetch.return_value = {"3.4.1", "3.9.0", "2.1.0", "4.0.0", "4.1.0"} pipeline = UnfurlVersionRangePipeline() @@ -67,3 +111,51 @@ def test_affecting_version_range_unfurl(self, mock_fetch): 
self.assertEqual(3, PackageV2.objects.count()) self.assertEqual(1, impact.fixed_by_packages.count()) self.assertEqual(2, impact.affecting_packages.count()) + + def test_impacted_package_qs_dont_process_empty_vers(self): + insert_advisory_v2( + advisory=self.advisory2, + pipeline_id="test_pipeline_v2", + logger=self.logger.write, + ) + + self.assertEqual(3, ImpactedPackage.objects.count()) + self.assertEqual(2, impacted_package_qs().count()) + + def test_impacted_package_qs_dont_process_empty_vers(self): + insert_advisory_v2( + advisory=self.advisory2, + pipeline_id="test_pipeline_v2", + logger=self.logger.write, + ) + impact = ImpactedPackage.objects.filter(affecting_vers__isnull=False).first() + impact.last_range_unfurl_at = timezone.now() + impact.save() + + self.assertEqual(1, impacted_package_qs().count()) + + def test_impacted_package_qs_prioritize_never_unfurled_impact_first(self): + insert_advisory_v2( + advisory=self.advisory2, + pipeline_id="test_pipeline_v2", + logger=self.logger.write, + ) + impact = ImpactedPackage.objects.filter(affecting_vers__isnull=False).first() + impact.last_range_unfurl_at = timezone.now() - timedelta(days=4) + impact.save() + + self.assertEqual(2, impacted_package_qs().count()) + first_impact_to_process = impacted_package_qs().first() + self.assertEqual(None, first_impact_to_process.last_range_unfurl_at) + + def test_impacted_package_reunfurl_vers(self): + insert_advisory_v2( + advisory=self.advisory2, + pipeline_id="test_pipeline_v2", + logger=self.logger.write, + ) + impact = ImpactedPackage.objects.filter(affecting_vers__isnull=False).first() + impact.last_range_unfurl_at = timezone.now() + impact.save() + + self.assertEqual(1, impacted_package_qs().count()) From 58b7873ea11b7ae00461773ab65199b21e142137 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 7 Apr 2026 18:49:20 +0530 Subject: [PATCH 520/545] feat: indicate pipeline priority in dashboard Signed-off-by: Keshav Priyadarshi --- vulnerabilities/tasks.py | 2 
+- vulnerabilities/templates/pipeline_dashboard.html | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/tasks.py b/vulnerabilities/tasks.py index 5a5968831..6c2be3fac 100644 --- a/vulnerabilities/tasks.py +++ b/vulnerabilities/tasks.py @@ -118,7 +118,7 @@ def set_run_failure(job, connection, type, value, traceback): def enqueue_pipeline(pipeline_id): pipeline_schedule = models.PipelineSchedule.objects.get(pipeline_id=pipeline_id) - queue = queues.get(pipeline_schedule.get_priority_display()) + queue = queues.get(pipeline_schedule.get_run_priority_display()) if pipeline_schedule.status in [ models.PipelineRun.Status.RUNNING, diff --git a/vulnerabilities/templates/pipeline_dashboard.html b/vulnerabilities/templates/pipeline_dashboard.html index a7f4139a4..fc474efe7 100644 --- a/vulnerabilities/templates/pipeline_dashboard.html +++ b/vulnerabilities/templates/pipeline_dashboard.html @@ -62,6 +62,7 @@

    Pipeline Dashboard

    Pipeline ID
    Active
    +
    Priority
    Interval
    Status
    Last Run End Time
    @@ -79,6 +80,7 @@

    Pipeline Dashboard

    {{ schedule.pipeline_id }}
    {{ schedule.is_active|yesno:"Yes,No" }}
    +
    {{ schedule.get_run_priority_display|capfirst }}
    {% if schedule.is_run_once %} Once From 7f30e3a343f94748fce6fef805c11abb0465ca64 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 8 Apr 2026 15:02:21 +0530 Subject: [PATCH 521/545] feat: track last successful unfurl date Signed-off-by: Keshav Priyadarshi --- ...edpackage_last_range_unfurl_at_and_more.py | 2 +- ...package_last_successful_range_unfurl_at.py | 23 ++++++++++++++ vulnerabilities/models.py | 13 ++++++-- .../v2_improvers/unfurl_version_range.py | 31 +++++++++++++------ vulnerabilities/pipes/openssl.py | 4 +-- .../v2_importers/test_collect_fix_commit.py | 4 +-- .../v2_improvers/test_unfurl_version_range.py | 2 ++ vulnerabilities/tests/test_api.py | 6 ++-- 8 files changed, 63 insertions(+), 22 deletions(-) create mode 100644 vulnerabilities/migrations/0121_impactedpackage_last_successful_range_unfurl_at.py diff --git a/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py b/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py index f6b5d8a93..1cd5d7828 100644 --- a/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py +++ b/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py @@ -1,4 +1,4 @@ -# Generated by Django 5.2.11 on 2026-04-06 20:51 +# Generated by Django 5.2.11 on 2026-04-08 09:28 from django.db import migrations, models diff --git a/vulnerabilities/migrations/0121_impactedpackage_last_successful_range_unfurl_at.py b/vulnerabilities/migrations/0121_impactedpackage_last_successful_range_unfurl_at.py new file mode 100644 index 000000000..f9277183b --- /dev/null +++ b/vulnerabilities/migrations/0121_impactedpackage_last_successful_range_unfurl_at.py @@ -0,0 +1,23 @@ +# Generated by Django 5.2.11 on 2026-04-08 09:28 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0120_impactedpackage_last_range_unfurl_at_and_more"), + ] + + operations = [ + 
migrations.AddField( + model_name="impactedpackage", + name="last_successful_range_unfurl_at", + field=models.DateTimeField( + blank=True, + db_index=True, + help_text="Timestamp of the last successful vers range unfurl.", + null=True, + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index bb4390f2a..a802f7011 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1139,9 +1139,9 @@ def get_affecting_vulnerabilities(self): next_fixed_package_vulns = list(fixed_by_pkg.affected_by) fixed_by_package_details["fixed_by_purl"] = fixed_by_purl - fixed_by_package_details[ - "fixed_by_purl_vulnerabilities" - ] = next_fixed_package_vulns + fixed_by_package_details["fixed_by_purl_vulnerabilities"] = ( + next_fixed_package_vulns + ) fixed_by_pkgs.append(fixed_by_package_details) vuln_details["fixed_by_package_details"] = fixed_by_pkgs @@ -3259,6 +3259,13 @@ class ImpactedPackage(models.Model): help_text="Timestamp of the last vers range unfurl.", ) + last_successful_range_unfurl_at = models.DateTimeField( + blank=True, + null=True, + db_index=True, + help_text="Timestamp of the last successful vers range unfurl.", + ) + def to_dict(self): from vulnerabilities.utils import purl_to_dict diff --git a/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py b/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py index cb65f5f93..48d691fe0 100644 --- a/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py +++ b/vulnerabilities/pipelines/v2_improvers/unfurl_version_range.py @@ -52,20 +52,25 @@ def unfurl_version_range(self): processed_impacted_packages_count = 0 processed_affected_packages_count = 0 cached_versions = {} - impacts_to_update = [] - update_batch_size = 5 + update_unfurl_date = [] + update_successful_unfurl_date = [] + update_batch_size = 5000 + chunk_size = 5000 impacted_packages = impacted_package_qs(cutoff_day=self.reunfurl_after_days) impacted_packages_count = impacted_packages.count() 
self.log(f"Unfurl affected vers range for {impacted_packages_count:,d} ImpactedPackage.") - progress = LoopProgress(total_iterations=impacted_packages_count, logger=self.log) - for impact in progress.iter(impacted_packages.iterator(chunk_size=5000)): - impacts_to_update.append(impact.pk) + progress = LoopProgress( + total_iterations=impacted_packages_count, progress_step=5, logger=self.log + ) + for impact in progress.iter(impacted_packages.iterator(chunk_size=chunk_size)): + update_unfurl_date.append(impact.pk) purl = PackageURL.from_string(impact.base_purl) if not impact.affecting_vers or not any( c in impact.affecting_vers for c in ("<", ">", "!") ): + update_successful_unfurl_date.append(impact.pk) continue if purl.type not in FETCHCODE_SUPPORTED_ECOSYSTEMS: continue @@ -87,17 +92,25 @@ def unfurl_version_range(self): relation=ImpactedPackageAffecting, logger=self.log, ) + update_successful_unfurl_date.append(impact.pk) processed_impacted_packages_count += 1 - if len(impacts_to_update) > update_batch_size: - ImpactedPackage.objects.filter(pk__in=impacts_to_update).update( + if len(update_unfurl_date) > update_batch_size: + ImpactedPackage.objects.filter(pk__in=update_unfurl_date).update( last_range_unfurl_at=timezone.now() ) - impacts_to_update.clear() + ImpactedPackage.objects.filter(pk__in=update_successful_unfurl_date).update( + last_successful_range_unfurl_at=timezone.now() + ) + update_unfurl_date.clear() + update_successful_unfurl_date.clear() - ImpactedPackage.objects.filter(pk__in=impacts_to_update).update( + ImpactedPackage.objects.filter(pk__in=update_unfurl_date).update( last_range_unfurl_at=timezone.now() ) + ImpactedPackage.objects.filter(pk__in=update_successful_unfurl_date).update( + last_successful_range_unfurl_at=timezone.now() + ) self.log(f"Successfully processed {processed_impacted_packages_count:,d} ImpactedPackage.") self.log(f"{processed_affected_packages_count:,d} new Impact-Package relation created.") diff --git 
a/vulnerabilities/pipes/openssl.py b/vulnerabilities/pipes/openssl.py index b240f416c..1dffdedc1 100644 --- a/vulnerabilities/pipes/openssl.py +++ b/vulnerabilities/pipes/openssl.py @@ -89,9 +89,7 @@ def get_reference(reference_name, tag, reference_url): ref_type = ( AdvisoryReference.COMMIT if "commit" in name or tag == "patch" - else AdvisoryReference.ADVISORY - if "advisory" in name - else AdvisoryReference.OTHER + else AdvisoryReference.ADVISORY if "advisory" in name else AdvisoryReference.OTHER ) return ReferenceV2( diff --git a/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py b/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py index dac2c7781..9a687a3b7 100644 --- a/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py +++ b/vulnerabilities/tests/pipelines/v2_importers/test_collect_fix_commit.py @@ -52,9 +52,7 @@ def test_collect_fix_commits_groups_by_vuln(mock_repo, pipeline): side_effect=lambda c: ( ["CVE-2021-0001"] if "CVE" in c.message - else ["GHSA-dead-beef-baad"] - if "GHSA" in c.message - else [] + else ["GHSA-dead-beef-baad"] if "GHSA" in c.message else [] ) ) diff --git a/vulnerabilities/tests/pipelines/v2_improvers/test_unfurl_version_range.py b/vulnerabilities/tests/pipelines/v2_improvers/test_unfurl_version_range.py index ae23930f6..3d73c6884 100644 --- a/vulnerabilities/tests/pipelines/v2_improvers/test_unfurl_version_range.py +++ b/vulnerabilities/tests/pipelines/v2_improvers/test_unfurl_version_range.py @@ -111,6 +111,8 @@ def test_affecting_version_range_unfurl(self, mock_fetch): self.assertEqual(3, PackageV2.objects.count()) self.assertEqual(1, impact.fixed_by_packages.count()) self.assertEqual(2, impact.affecting_packages.count()) + self.assertNotEqual(None, impact.last_range_unfurl_at) + self.assertNotEqual(None, impact.last_successful_range_unfurl_at) def test_impacted_package_qs_dont_process_empty_vers(self): insert_advisory_v2( diff --git a/vulnerabilities/tests/test_api.py 
b/vulnerabilities/tests/test_api.py index 9ed647099..31f2b7774 100644 --- a/vulnerabilities/tests/test_api.py +++ b/vulnerabilities/tests/test_api.py @@ -75,9 +75,9 @@ def cleaned_response(response): reference["scores"] = sorted( reference["scores"], key=lambda x: (x["value"], x["scoring_system"]) ) - package_data["resolved_vulnerabilities"][index]["references"][index2][ - "scores" - ] = reference["scores"] + package_data["resolved_vulnerabilities"][index]["references"][index2]["scores"] = ( + reference["scores"] + ) cleaned_response.append(package_data) From 41484a582936db313a3a846f00216190895d9026 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 8 Apr 2026 16:18:56 +0530 Subject: [PATCH 522/545] refactor: squash migrations Signed-off-by: Keshav Priyadarshi --- ...edpackage_last_range_unfurl_at_and_more.py | 12 +++++++++- ...package_last_successful_range_unfurl_at.py | 23 ------------------- 2 files changed, 11 insertions(+), 24 deletions(-) delete mode 100644 vulnerabilities/migrations/0121_impactedpackage_last_successful_range_unfurl_at.py diff --git a/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py b/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py index 1cd5d7828..6e070bde4 100644 --- a/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py +++ b/vulnerabilities/migrations/0120_impactedpackage_last_range_unfurl_at_and_more.py @@ -1,4 +1,4 @@ -# Generated by Django 5.2.11 on 2026-04-08 09:28 +# Generated by Django 5.2.11 on 2026-04-08 10:48 from django.db import migrations, models @@ -20,6 +20,16 @@ class Migration(migrations.Migration): null=True, ), ), + migrations.AddField( + model_name="impactedpackage", + name="last_successful_range_unfurl_at", + field=models.DateTimeField( + blank=True, + db_index=True, + help_text="Timestamp of the last successful vers range unfurl.", + null=True, + ), + ), migrations.AddField( model_name="pipelineschedule", 
name="run_priority", diff --git a/vulnerabilities/migrations/0121_impactedpackage_last_successful_range_unfurl_at.py b/vulnerabilities/migrations/0121_impactedpackage_last_successful_range_unfurl_at.py deleted file mode 100644 index f9277183b..000000000 --- a/vulnerabilities/migrations/0121_impactedpackage_last_successful_range_unfurl_at.py +++ /dev/null @@ -1,23 +0,0 @@ -# Generated by Django 5.2.11 on 2026-04-08 09:28 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("vulnerabilities", "0120_impactedpackage_last_range_unfurl_at_and_more"), - ] - - operations = [ - migrations.AddField( - model_name="impactedpackage", - name="last_successful_range_unfurl_at", - field=models.DateTimeField( - blank=True, - db_index=True, - help_text="Timestamp of the last successful vers range unfurl.", - null=True, - ), - ), - ] From e7bbd898ab4efcc2326744eb7c4fc30bcfa3a552 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 8 Apr 2026 18:46:45 +0530 Subject: [PATCH 523/545] chore: prepare v38.2.0 release Signed-off-by: Keshav Priyadarshi --- CHANGELOG.rst | 5 +++++ setup.cfg | 2 +- vulnerablecode/__init__.py | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index f52bf437a..3aaed5212 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,11 @@ Release notes ============= +Version v38.2.0 +--------------------- + +- feat: add high priority queue and run version range unfurling pipeline more frequently (https://github.com/aboutcode-org/vulnerablecode/pull/2256) + Version v38.1.0 --------------------- diff --git a/setup.cfg b/setup.cfg index 16dbe9b9a..b91fd55a3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = vulnerablecode -version = 38.1.0 +version = 38.2.0 license = Apache-2.0 AND CC-BY-SA-4.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 diff --git 
a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index 4966e4c04..86794a0b9 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -14,7 +14,7 @@ import git -__version__ = "38.1.0" +__version__ = "38.2.0" PROJECT_DIR = Path(__file__).resolve().parent From 8b90efbc11ad0d3f889fdfb0ad39b64a73408ae9 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 8 Apr 2026 19:34:57 +0530 Subject: [PATCH 524/545] chore: enable verbose logging release ci and prepare v38.3.0 release Signed-off-by: Keshav Priyadarshi --- .github/workflows/pypi-release.yml | 2 ++ CHANGELOG.rst | 2 +- setup.cfg | 2 +- vulnerablecode/__init__.py | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 9fa9534c5..c56392a1e 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -85,3 +85,5 @@ jobs: - name: Publish to PyPI if: startsWith(github.ref, 'refs/tags/') uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0 + with: + verbose: true diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 3aaed5212..a517b4561 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,7 +1,7 @@ Release notes ============= -Version v38.2.0 +Version v38.3.0 --------------------- - feat: add high priority queue and run version range unfurling pipeline more frequently (https://github.com/aboutcode-org/vulnerablecode/pull/2256) diff --git a/setup.cfg b/setup.cfg index b91fd55a3..00b785eea 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = vulnerablecode -version = 38.2.0 +version = 38.3.0 license = Apache-2.0 AND CC-BY-SA-4.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index 86794a0b9..7027b67c2 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -14,7 +14,7 @@ 
import git -__version__ = "38.2.0" +__version__ = "38.3.0" PROJECT_DIR = Path(__file__).resolve().parent From c6dca58011218c1b41fa140dcdc8f52ae10dcf81 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 10 Apr 2026 17:45:32 +0530 Subject: [PATCH 525/545] fix: run pipeline scheduling jobs in respective queues - Instead of running all scheduling jobs in default queue, use each pipeline's assigned queue for scheduling. Signed-off-by: Keshav Priyadarshi --- vulnerabilities/models.py | 6 +++++- vulnerabilities/schedules.py | 2 ++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index a802f7011..497f35a69 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -2351,7 +2351,11 @@ def save(self, *args, **kwargs): if not self.pk: self.schedule_work_id = self.create_new_job(execute_now=True) elif self.pk and (existing := PipelineSchedule.objects.get(pk=self.pk)): - if existing.is_active != self.is_active or existing.run_interval != self.run_interval: + if ( + existing.is_active != self.is_active + or existing.run_interval != self.run_interval + or existing.run_priority != self.run_priority + ): self.schedule_work_id = self.create_new_job() self.full_clean() return super().save(*args, **kwargs) diff --git a/vulnerabilities/schedules.py b/vulnerabilities/schedules.py index 215129e10..27fb7e09a 100644 --- a/vulnerabilities/schedules.py +++ b/vulnerabilities/schedules.py @@ -24,6 +24,7 @@ def schedule_execution(pipeline_schedule, execute_now=False): Takes a `PackageSchedule` object as input and schedule a recurring job using `rq_scheduler` to execute the pipeline. 
""" + queue_name = pipeline_schedule.get_run_priority_display() first_execution = datetime.datetime.now(tz=datetime.timezone.utc) if not execute_now: first_execution = pipeline_schedule.next_run_date @@ -36,6 +37,7 @@ def schedule_execution(pipeline_schedule, execute_now=False): args=[pipeline_schedule.pipeline_id], interval=interval_in_seconds, repeat=None, + queue_name=queue_name, ) return job._id From 5962ce66abb90cee3f1df7455aabd3e5306afe0e Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 10 Apr 2026 19:54:05 +0530 Subject: [PATCH 526/545] feat: add function to compute queue load factor Signed-off-by: Keshav Priyadarshi --- vulnerabilities/models.py | 5 +++ vulnerabilities/tasks.py | 67 +++++++++++++++++++++++++++++++++++---- 2 files changed, 65 insertions(+), 7 deletions(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 497f35a69..e00f067c5 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -2387,6 +2387,11 @@ def all_runs(self): def latest_run(self): return self.pipelineruns.first() if self.pipelineruns.exists() else None + @property + def latest_successful_run(self): + successful_runs = self.pipelineruns.filter(run_end_date__isnull=False, run_exitcode=0) + return successful_runs.first() if successful_runs.exists() else None + @property def earliest_run(self): return self.pipelineruns.earliest("run_start_date") if self.pipelineruns.exists() else None diff --git a/vulnerabilities/tasks.py b/vulnerabilities/tasks.py index 6c2be3fac..2454e041b 100644 --- a/vulnerabilities/tasks.py +++ b/vulnerabilities/tasks.py @@ -9,24 +9,23 @@ import logging +from collections import Counter +from contextlib import suppress from io import StringIO from traceback import format_exc as traceback_format_exc import django_rq +from redis.exceptions import ConnectionError +from rq import Worker from vulnerabilities import models from vulnerabilities.importer import Importer from vulnerabilities.improver import Improver +from 
vulnerablecode.settings import RQ_QUEUES logger = logging.getLogger(__name__) -default_queue = django_rq.get_queue("default") -high_queue = django_rq.get_queue("high") - -queues = { - "default": django_rq.get_queue("default"), - "high": django_rq.get_queue("high"), -} +queues = {queue: django_rq.get_queue(queue) for queue in RQ_QUEUES.keys()} def execute_pipeline(pipeline_id, run_id): @@ -151,3 +150,57 @@ def dequeue_job(job_id): for queue in queues.values(): if job_id in queue.jobs: queue.remove(job_id) + + +def compute_queue_load_factor(): + """ + Compute worker load per queue. + + Load factor is the ratio of the total compute required to run all active pipelines + in a queue to the available worker capacity for that queue over a 24-hour period. + A value greater than 1 indicates that the number of workers is insufficient to + run all pipelines within the schedule. + + Also compute the additional workers needed to balance each queue + """ + field = models.PipelineSchedule._meta.get_field("run_priority") + label_to_value = {label: value for value, label in field.choices} + total_compute_seconds_per_queue = {} + worker_per_queue = {} + load_per_queue = {} + seconds_in_24_hr = 86400 + + for queue in RQ_QUEUES.keys(): + total_compute_seconds_per_queue[queue] = sum( + (p.latest_successful_run.runtime / (p.run_interval / 24)) + for p in models.PipelineSchedule.objects.filter( + is_active=True, run_priority=label_to_value[queue] + ) + if p.latest_successful_run + ) + + with suppress(ConnectionError): + redis_conn = django_rq.get_connection() + queue_names = [ + w.queue_names()[0] for w in Worker.all(connection=redis_conn) if w.queue_names() + ] + worker_per_queue = dict(Counter(queue_names)) + + for queue_name, worker_count in worker_per_queue.items(): + total_compute = total_compute_seconds_per_queue.get(queue_name, 0) + if worker_count == 0 or total_compute == 0: + continue + + unit_load_on_queue = total_compute / seconds_in_24_hr + + num_of_worker_for_balanced_queue 
= round(unit_load_on_queue) + addition_worker_needed = max(num_of_worker_for_balanced_queue - worker_count, 0) + + net_load_on_queue = unit_load_on_queue / worker_count + + load_per_queue[queue_name] = { + "load_factor": net_load_on_queue, + "additional_worker": addition_worker_needed, + } + + return dict(sorted(load_per_queue.items(), key=lambda x: x[0], reverse=True)) From 4240d36a21c2301a5719f7f64916f9b1141457c2 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 10 Apr 2026 20:42:42 +0530 Subject: [PATCH 527/545] fix: track queues with no workers in load factor Signed-off-by: Keshav Priyadarshi --- vulnerabilities/tasks.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/vulnerabilities/tasks.py b/vulnerabilities/tasks.py index 2454e041b..a78c13551 100644 --- a/vulnerabilities/tasks.py +++ b/vulnerabilities/tasks.py @@ -170,6 +170,13 @@ def compute_queue_load_factor(): load_per_queue = {} seconds_in_24_hr = 86400 + with suppress(ConnectionError): + redis_conn = django_rq.get_connection() + queue_names = [ + w.queue_names()[0] for w in Worker.all(connection=redis_conn) if w.queue_names() + ] + worker_per_queue = dict(Counter(queue_names)) + for queue in RQ_QUEUES.keys(): total_compute_seconds_per_queue[queue] = sum( (p.latest_successful_run.runtime / (p.run_interval / 24)) @@ -178,17 +185,13 @@ def compute_queue_load_factor(): ) if p.latest_successful_run ) - - with suppress(ConnectionError): - redis_conn = django_rq.get_connection() - queue_names = [ - w.queue_names()[0] for w in Worker.all(connection=redis_conn) if w.queue_names() - ] - worker_per_queue = dict(Counter(queue_names)) + if queue not in worker_per_queue: + worker_per_queue[queue] = 0 for queue_name, worker_count in worker_per_queue.items(): + net_load_on_queue = "no_worker" total_compute = total_compute_seconds_per_queue.get(queue_name, 0) - if worker_count == 0 or total_compute == 0: + if total_compute == 0: continue unit_load_on_queue = 
total_compute / seconds_in_24_hr @@ -196,7 +199,8 @@ def compute_queue_load_factor(): num_of_worker_for_balanced_queue = round(unit_load_on_queue) addition_worker_needed = max(num_of_worker_for_balanced_queue - worker_count, 0) - net_load_on_queue = unit_load_on_queue / worker_count + if worker_count > 0: + net_load_on_queue = unit_load_on_queue / worker_count load_per_queue[queue_name] = { "load_factor": net_load_on_queue, From 6742ba61154ab5182d4a0eeb22c02129b3fe531e Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 10 Apr 2026 20:44:11 +0530 Subject: [PATCH 528/545] feat: cache computed load factor for 5 minutes Signed-off-by: Keshav Priyadarshi --- vulnerabilities/views.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index b984fbb51..5b9406f87 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -15,6 +15,7 @@ from cvss.exceptions import CVSS4MalformedError from django.contrib import messages from django.contrib.auth.views import LoginView +from django.core.cache import cache from django.core.exceptions import ValidationError from django.core.mail import send_mail from django.db.models import Exists @@ -48,6 +49,7 @@ from vulnerabilities.pipelines.v2_importers.epss_importer_v2 import EPSSImporterPipeline from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS +from vulnerabilities.tasks import compute_queue_load_factor from vulnerabilities.throttling import AnonUserUIThrottle from vulnerabilities.utils import TYPES_WITH_MULTIPLE_IMPORTERS from vulnerabilities.utils import get_advisories_from_groups @@ -57,6 +59,8 @@ PAGE_SIZE = 10 +CACHE_TIMEOUT = 60 * 5 + class VulnerableCodeView(View): """ @@ -961,6 +965,13 @@ def get_queryset(self): def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) + load_per_queue = cache.get("load_per_queue") + + if load_per_queue is None: + 
load_per_queue = compute_queue_load_factor() + cache.set("load_per_queue", load_per_queue, CACHE_TIMEOUT) + + context["load_per_queue"] = load_per_queue context["active_pipeline_count"] = PipelineSchedule.objects.filter(is_active=True).count() context["disabled_pipeline_count"] = PipelineSchedule.objects.filter( is_active=False From 065d5a738114dc88a23f049dbcfb2f04334ddd57 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 10 Apr 2026 20:45:18 +0530 Subject: [PATCH 529/545] feat: show load factor on pipeline dashboard Signed-off-by: Keshav Priyadarshi --- .../templates/pipeline_dashboard.html | 76 +++++++++++++++++-- 1 file changed, 71 insertions(+), 5 deletions(-) diff --git a/vulnerabilities/templates/pipeline_dashboard.html b/vulnerabilities/templates/pipeline_dashboard.html index fc474efe7..826c614ea 100644 --- a/vulnerabilities/templates/pipeline_dashboard.html +++ b/vulnerabilities/templates/pipeline_dashboard.html @@ -1,5 +1,7 @@ {% extends "base.html" %} +{% load utils %} + {% block title %} Pipeline Dashboard {% endblock %} @@ -22,6 +24,18 @@ .column { word-break: break-word; } + + .has-text-orange { + color: #ff8c42 !important; + } + + .has-tooltip-orange::before { + background-color: #ff8c42 !important; + } + + .has-tooltip-orange::after { + border-top-color: #ff8c42 !important; + } {% endblock %} @@ -48,11 +62,63 @@

    Pipeline Dashboard

    -
    -

    - {{ active_pipeline_count|default:0 }} active pipeline{{ active_pipeline_count|default:0|pluralize }}, - {{ disabled_pipeline_count|default:0 }} disabled pipeline{{ disabled_pipeline_count|default:0|pluralize }} -

    +
    +
    + {% if load_per_queue %} +

    + + Load Factor: + + {% for queue_name, values in load_per_queue.items %} + + + {{ queue_name| capfirst }} + + {% with load_factor=values|get_item:"load_factor" additional=values|get_item:"additional_worker" %} + {% if load_factor == "no_worker" %} + + + + {% elif load_factor < 1 %} + + {{ load_factor|floatformat:2 }} + + + {% elif load_factor < 1.6 %} + + {{ load_factor|floatformat:2 }} + + + {% else %} + + {{ load_factor|floatformat:2 }} + + + {% endif %} + {% endwith %} + + {% if not forloop.last %} • {% endif %} + + {% endfor %} +

    + {% endif %} +
    +
    +

    + {{ active_pipeline_count|default:0 }} active pipeline{{ active_pipeline_count|default:0|pluralize }}, + {{ disabled_pipeline_count|default:0 }} disabled pipeline{{ disabled_pipeline_count|default:0|pluralize }} +

    +
    From 104a6115d4fba05a0c7ed94f1cfa17ba452ac308 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Fri, 10 Apr 2026 21:21:09 +0530 Subject: [PATCH 530/545] chore: prepare for v38.4.0 release Signed-off-by: Keshav Priyadarshi --- CHANGELOG.rst | 6 ++++++ setup.cfg | 2 +- vulnerablecode/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a517b4561..85d04ed9c 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,12 @@ Release notes ============= +Version v38.4.0 +--------------------- + +- fix: run pipeline scheduling jobs in respective queues (https://github.com/aboutcode-org/vulnerablecode/pull/2263) +- feat: show queue load factors on the pipeline dashboard (https://github.com/aboutcode-org/vulnerablecode/pull/2264) + Version v38.3.0 --------------------- diff --git a/setup.cfg b/setup.cfg index 00b785eea..e1275dae2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = vulnerablecode -version = 38.3.0 +version = 38.4.0 license = Apache-2.0 AND CC-BY-SA-4.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index 7027b67c2..f8263d4c5 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -14,7 +14,7 @@ import git -__version__ = "38.3.0" +__version__ = "38.4.0" PROJECT_DIR = Path(__file__).resolve().parent From 11d718e3db6401062e6afaeb516c6e16c153d3d1 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 8 Apr 2026 20:20:36 +0530 Subject: [PATCH 531/545] feat: add field to track the latest advisory for an avid Signed-off-by: Keshav Priyadarshi --- vulnerabilities/models.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index e00f067c5..0be2cf515 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py 
@@ -2886,11 +2886,7 @@ def latest_for_avid(self, avid: str): ) def latest_per_avid(self): - return self.order_by( - "avid", - F("date_collected").desc(nulls_last=True), - "-id", - ).distinct("avid") + return self.filter(is_latest=True) def latest_for_avids(self, avids): return self.filter(avid__in=avids).latest_per_avid() @@ -3007,6 +3003,7 @@ class AdvisoryV2(models.Model): max_length=200, blank=False, null=False, + db_index=True, help_text="Unique ID for the datasource used for this advisory ." "e.g.: nginx_importer_v2", ) @@ -3090,6 +3087,14 @@ class AdvisoryV2(models.Model): help_text="UTC Date on which the advisory was collected", ) + is_latest = models.BooleanField( + default=False, + blank=False, + null=False, + db_index=True, + help_text="Indicates whether this is the latest version of the advisory identified by its AVID.", + ) + original_advisory_text = models.TextField( blank=True, null=True, @@ -3142,6 +3147,11 @@ class AdvisoryV2(models.Model): class Meta: unique_together = ["datasource_id", "advisory_id", "unique_content_id"] ordering = ["datasource_id", "advisory_id", "date_published", "unique_content_id"] + constraints = [ + models.UniqueConstraint( + fields=["avid"], condition=Q(is_latest=True), name="unique_latest_per_avid" + ) + ] indexes = [ models.Index( fields=["avid", "-date_collected", "-id"], From 43f634c7d0f18323bfd2757bfafaa34dd0f95d1d Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 8 Apr 2026 20:25:32 +0530 Subject: [PATCH 532/545] feat: mark most recent avid as latest when inserting advisory Signed-off-by: Keshav Priyadarshi --- vulnerabilities/pipes/advisory.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/vulnerabilities/pipes/advisory.py b/vulnerabilities/pipes/advisory.py index bcdd95075..9250f2679 100644 --- a/vulnerabilities/pipes/advisory.py +++ b/vulnerabilities/pipes/advisory.py @@ -334,6 +334,13 @@ def insert_advisory_v2( if not created: return advisory_obj + AdvisoryV2.objects.filter( + 
avid=f"{pipeline_id}/{advisory.advisory_id}", + is_latest=True, + ).update(is_latest=False) + advisory_obj.is_latest = True + advisory_obj.save() + aliases = get_or_create_advisory_aliases(aliases=advisory.aliases) references = get_or_create_advisory_references(references=advisory.references) severities = get_or_create_advisory_severities(severities=advisory.severities) From 7798b964a0a6776ac26a602574a9fc71f98c3691 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 14 Apr 2026 02:21:53 +0530 Subject: [PATCH 533/545] feat: enable db index on advisory_id field Signed-off-by: Keshav Priyadarshi --- vulnerabilities/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 0be2cf515..896da7c76 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -3013,6 +3013,7 @@ class AdvisoryV2(models.Model): blank=False, null=False, unique=False, + db_index=True, help_text="An advisory is a unique vulnerability identifier in some database, " "such as PYSEC-2020-2233", ) From ea463ce0a124fd21a19934a204dfde6946544d62 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 14 Apr 2026 02:23:07 +0530 Subject: [PATCH 534/545] feat: backfill latest advisory for existing v2 advisories Signed-off-by: Keshav Priyadarshi --- ...t_alter_advisoryv2_advisory_id_and_more.py | 65 +++++++++++++++++++ 1 file changed, 65 insertions(+) create mode 100644 vulnerabilities/migrations/0121_advisoryv2_is_latest_alter_advisoryv2_advisory_id_and_more.py diff --git a/vulnerabilities/migrations/0121_advisoryv2_is_latest_alter_advisoryv2_advisory_id_and_more.py b/vulnerabilities/migrations/0121_advisoryv2_is_latest_alter_advisoryv2_advisory_id_and_more.py new file mode 100644 index 000000000..be1db5016 --- /dev/null +++ b/vulnerabilities/migrations/0121_advisoryv2_is_latest_alter_advisoryv2_advisory_id_and_more.py @@ -0,0 +1,65 @@ +# Generated by Django 5.2.11 on 2026-04-13 19:05 + +from django.db import migrations 
+from django.db import models +from django.db.models import F + + +class Migration(migrations.Migration): + def add_is_latest_on_existing_advisory(apps, schema_editor): + Advisory = apps.get_model("vulnerabilities", "AdvisoryV2") + + print(f"\nUpdating is_latest on existing V2 Advisory.") + latest_qs = Advisory.objects.order_by( + "avid", + F("date_collected").desc(nulls_last=True), + "-id", + ).distinct("avid") + + Advisory.objects.filter(id__in=latest_qs.values("id")).update(is_latest=True) + + dependencies = [ + ("vulnerabilities", "0120_impactedpackage_last_range_unfurl_at_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="advisoryv2", + name="is_latest", + field=models.BooleanField( + db_index=True, + default=False, + help_text="Indicates whether this is the latest version of the advisory identified by its AVID.", + ), + ), + migrations.AlterField( + model_name="advisoryv2", + name="advisory_id", + field=models.CharField( + db_index=True, + help_text="An advisory is a unique vulnerability identifier in some database, such as PYSEC-2020-2233", + max_length=500, + ), + ), + migrations.AlterField( + model_name="advisoryv2", + name="datasource_id", + field=models.CharField( + db_index=True, + help_text="Unique ID for the datasource used for this advisory .e.g.: nginx_importer_v2", + max_length=200, + ), + ), + migrations.AddConstraint( + model_name="advisoryv2", + constraint=models.UniqueConstraint( + condition=models.Q(("is_latest", True)), + fields=("avid",), + name="unique_latest_per_avid", + ), + ), + migrations.RunPython( + code=add_is_latest_on_existing_advisory, + reverse_code=migrations.RunPython.noop, + ), + ] From d671ebd9a35ebbcc89f6e3cacd65042507da60a6 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 14 Apr 2026 02:26:51 +0530 Subject: [PATCH 535/545] test: add test for latest advisory data migration Signed-off-by: Keshav Priyadarshi --- vulnerabilities/tests/test_data_migrations.py | 57 +++++++++++++++++++ 1 file 
changed, 57 insertions(+) diff --git a/vulnerabilities/tests/test_data_migrations.py b/vulnerabilities/tests/test_data_migrations.py index 8303c4003..c32abb83f 100644 --- a/vulnerabilities/tests/test_data_migrations.py +++ b/vulnerabilities/tests/test_data_migrations.py @@ -12,6 +12,7 @@ from django.db import IntegrityError from django.db import connection from django.db.migrations.executor import MigrationExecutor +from django.db.models import Count from django.test import TestCase from django.utils import timezone from packageurl import PackageURL @@ -1031,3 +1032,59 @@ def test_m2m_relationships_work(self): self.assertIn(commit1, impacted.affecting_commits.all()) self.assertIn(commit2, impacted.fixed_by_commits.all()) + + +class TestLatestAdvisoryV2Migration(TestMigrations): + """Tests is_latest field population on existing v2 advisory.""" + + app_name = "vulnerabilities" + migrate_from = "0120_impactedpackage_last_range_unfurl_at_and_more" + migrate_to = "0121_advisoryv2_is_latest_alter_advisoryv2_advisory_id_and_more" + + def setUpBeforeMigration(self, apps): + AdvisoryV2 = apps.get_model("vulnerabilities", "AdvisoryV2") + + AdvisoryV2.objects.create( + unique_content_id="content_id_old", + url="https://old.example.com", + summary="Old advisory", + advisory_id="test_adv", + avid="test_pipeline/test_adv", + datasource_id="test_pipeline", + ) + + AdvisoryV2.objects.create( + unique_content_id="content_id_old2", + url="https://old.example.com", + summary="Old 2 advisory", + advisory_id="test_adv", + avid="test_pipeline/test_adv", + datasource_id="test_pipeline", + ) + + AdvisoryV2.objects.create( + unique_content_id="content_id_new", + url="https://old.example.com", + summary="New advisory", + advisory_id="test_adv", + avid="test_pipeline/test_adv", + datasource_id="test_pipeline", + ) + + def test_no_duplicate_is_latest_for_avid(self): + AdvisoryV2 = apps.get_model("vulnerabilities", "AdvisoryV2") + + duplicate = ( + AdvisoryV2.objects.filter(is_latest=True) + 
.values("avid") + .annotate(cnt=Count("id")) + .filter(cnt__gt=1) + ) + + self.assertFalse(duplicate.exists()) + + def test_latest_is_actually_recent(self): + AdvisoryV2 = apps.get_model("vulnerabilities", "AdvisoryV2") + + latest = AdvisoryV2.objects.get(avid="test_pipeline/test_adv", is_latest=True) + self.assertEqual("New advisory", latest.summary) From 55b87aee28c56e885419069eb82d4b943dab9fa2 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 14 Apr 2026 02:32:47 +0530 Subject: [PATCH 536/545] test: verify latest advisory is updated on new advisory insertion Signed-off-by: Keshav Priyadarshi --- vulnerabilities/tests/pipes/test_advisory.py | 78 ++++++++++++++++++++ 1 file changed, 78 insertions(+) diff --git a/vulnerabilities/tests/pipes/test_advisory.py b/vulnerabilities/tests/pipes/test_advisory.py index 8710b2ea4..67c073b61 100644 --- a/vulnerabilities/tests/pipes/test_advisory.py +++ b/vulnerabilities/tests/pipes/test_advisory.py @@ -18,12 +18,15 @@ from vulnerabilities import models from vulnerabilities.importer import AdvisoryData +from vulnerabilities.importer import AdvisoryDataV2 from vulnerabilities.importer import AffectedPackage +from vulnerabilities.importer import AffectedPackageV2 from vulnerabilities.importer import PackageCommitPatchData from vulnerabilities.importer import Reference from vulnerabilities.models import AdvisoryAlias from vulnerabilities.models import AdvisoryReference from vulnerabilities.models import AdvisorySeverity +from vulnerabilities.models import AdvisoryV2 from vulnerabilities.models import AdvisoryWeakness from vulnerabilities.models import PackageCommitPatch from vulnerabilities.pipes.advisory import get_or_create_advisory_aliases @@ -33,6 +36,8 @@ from vulnerabilities.pipes.advisory import get_or_create_advisory_weaknesses from vulnerabilities.pipes.advisory import get_or_create_aliases from vulnerabilities.pipes.advisory import import_advisory +from vulnerabilities.pipes.advisory import insert_advisory_v2 
+from vulnerabilities.tests.pipelines import TestLogger from vulnerabilities.utils import compute_content_id @@ -257,3 +262,76 @@ def test_get_or_create_advisory_commit(advisory_commit): assert isinstance(commit, PackageCommitPatch) assert commit.commit_hash in [c.commit_hash for c in advisory_commit] assert commit.vcs_url in [c.vcs_url for c in advisory_commit] + + +class TestLatestAdvisoryV2(TestCase): + def setUp(self): + self.logger = TestLogger() + self.advisory1 = AdvisoryDataV2( + summary="Test advisory old", + aliases=["CVE-2025-0001"], + references=[], + severities=[], + weaknesses=[], + affected_packages=[ + AffectedPackageV2( + package=PackageURL.from_string("pkg:npm/foobar"), + affected_version_range=VersionRange.from_string("vers:npm/>3.2.1|<4.0.0"), + fixed_version_range=VersionRange.from_string("vers:npm/4.0.0"), + introduced_by_commit_patches=[], + fixed_by_commit_patches=[], + ), + ], + patches=[], + advisory_id="GHSA-1234", + url="https://example.com/advisory", + ) + + self.advisory2 = AdvisoryDataV2( + summary="Test advisory new", + aliases=["CVE-2025-0001"], + references=[], + severities=[], + weaknesses=[], + affected_packages=[ + AffectedPackageV2( + package=PackageURL.from_string("pkg:npm/foobar"), + affected_version_range=VersionRange.from_string("vers:npm/>3.2.1|<4.0.0"), + fixed_version_range=VersionRange.from_string("vers:npm/4.0.0"), + introduced_by_commit_patches=[], + fixed_by_commit_patches=[], + ), + AffectedPackageV2( + package=PackageURL.from_string("pkg:npm/foobar"), + affected_version_range=None, + fixed_version_range=None, + introduced_by_commit_patches=[], + fixed_by_commit_patches=[ + PackageCommitPatchData( + vcs_url="https://foobar.vcs/", + commit_hash="982f801f", + ), + ], + ), + ], + patches=[], + advisory_id="GHSA-1234", + url="https://example.com/advisory", + ) + + insert_advisory_v2( + advisory=self.advisory1, + pipeline_id="test_pipeline_v2", + logger=self.logger.write, + ) + + def 
test_latest_advisory_update_on_advisory_insert(self): + adv_old = AdvisoryV2.objects.get(avid="test_pipeline_v2/GHSA-1234", is_latest=True) + insert_advisory_v2( + advisory=self.advisory2, + pipeline_id="test_pipeline_v2", + logger=self.logger.write, + ) + adv_new = AdvisoryV2.objects.get(avid="test_pipeline_v2/GHSA-1234", is_latest=True) + self.assertEqual("Test advisory old", adv_old.summary) + self.assertEqual("Test advisory new", adv_new.summary) From afd9d53232cbfaf669cd13ec2f119e6b2087e6ca Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Tue, 14 Apr 2026 03:38:49 +0530 Subject: [PATCH 537/545] test: populate is_latest in old advisory fixture Signed-off-by: Keshav Priyadarshi --- .../v2_improvers/test_collect_ssvc_trees.py | 2 + .../test_compute_package_risk_v2.py | 1 + .../v2_improvers/test_relate_severities.py | 9 ++ vulnerabilities/tests/test_api_v3.py | 18 ++- .../test_same_avid_different_content_id.py | 136 +++++------------- 5 files changed, 60 insertions(+), 106 deletions(-) diff --git a/vulnerabilities/tests/pipelines/v2_improvers/test_collect_ssvc_trees.py b/vulnerabilities/tests/pipelines/v2_improvers/test_collect_ssvc_trees.py index fa6719311..ad4a6bcb6 100644 --- a/vulnerabilities/tests/pipelines/v2_improvers/test_collect_ssvc_trees.py +++ b/vulnerabilities/tests/pipelines/v2_improvers/test_collect_ssvc_trees.py @@ -36,6 +36,7 @@ def vulnrichment_advisory(db): url="https://example.com/advisory/TEST-2024-0001", unique_content_id="unique-1234", date_collected=datetime.now(), + is_latest=True, ) @@ -59,6 +60,7 @@ def related_advisory(db): url="https://example.com/related/TEST-2024-0001", unique_content_id="unique-5678", date_collected=datetime.now(), + is_latest=True, ) diff --git a/vulnerabilities/tests/pipelines/v2_improvers/test_compute_package_risk_v2.py b/vulnerabilities/tests/pipelines/v2_improvers/test_compute_package_risk_v2.py index db6ffd5d3..305abf429 100644 --- 
a/vulnerabilities/tests/pipelines/v2_improvers/test_compute_package_risk_v2.py +++ b/vulnerabilities/tests/pipelines/v2_improvers/test_compute_package_risk_v2.py @@ -34,6 +34,7 @@ def test_simple_risk_pipeline(): unique_content_id="ajkef", url="https://test.com", date_collected=datetime.now(), + is_latest=True, ) adv.save() diff --git a/vulnerabilities/tests/pipelines/v2_improvers/test_relate_severities.py b/vulnerabilities/tests/pipelines/v2_improvers/test_relate_severities.py index 2dadbc679..27cf1f849 100644 --- a/vulnerabilities/tests/pipelines/v2_improvers/test_relate_severities.py +++ b/vulnerabilities/tests/pipelines/v2_improvers/test_relate_severities.py @@ -25,6 +25,7 @@ def test_relate_severities_by_advisory_id(): unique_content_id="ab1", url="https://example.com/advisory/CVE-2024-0001", date_collected="2024-01-01", + is_latest=True, ) severity_advisory = AdvisoryV2.objects.create( @@ -34,6 +35,7 @@ def test_relate_severities_by_advisory_id(): unique_content_id="ab2", url="https://example.com/epss/CVE-2024-0001", date_collected="2024-01-02", + is_latest=True, ) severity_advisory.severities.create( scoring_system=EPSS.identifier, @@ -59,6 +61,7 @@ def test_relate_severities_via_alias(): unique_content_id="ab3", url="https://example.com/advisory/CVE-2024-0002", date_collected="2024-01-01", + is_latest=True, ) base.aliases.create(alias="CVE-2024-ALIAS") @@ -70,6 +73,7 @@ def test_relate_severities_via_alias(): unique_content_id="ab4", url="https://example.com/epss/CVE-2024-ALIAS", date_collected="2024-01-02", + is_latest=True, ) severity_advisory.severities.create( scoring_system=EPSS.identifier, @@ -91,6 +95,7 @@ def test_no_self_relation_created(): url="https://example.com/advisory/CVE-2024-0003", date_collected="2024-01-03", avid="epss/CVE-2024-0003", + is_latest=True, ) advisory.severities.create( scoring_system=EPSS.identifier, @@ -112,6 +117,7 @@ def test_unsupported_severity_system_is_ignored(): url="https://example.com/advisory/CVE-2024-0004", 
date_collected="2024-01-01", avid="nvd/CVE-2024-0004", + is_latest=True, ) severity_advisory = AdvisoryV2.objects.create( @@ -121,6 +127,7 @@ def test_unsupported_severity_system_is_ignored(): url="https://example.com/epss/CVE-2024-0004", date_collected="2024-01-02", avid="epss/CVE-2024-0004", + is_latest=True, ) severity_advisory.severities.create( scoring_system="UNKNOWN_SYSTEM", @@ -142,6 +149,7 @@ def test_pipeline_is_idempotent(): url="https://example.com/advisory/CVE-2024-0005", date_collected="2024-01-01", avid="nvd/CVE-2024-0005", + is_latest=True, ) severity = AdvisoryV2.objects.create( @@ -150,6 +158,7 @@ def test_pipeline_is_idempotent(): unique_content_id="ab9", url="https://example.com/epss/CVE-2024-0005", date_collected="2024-01-02", + is_latest=True, avid="epss/CVE-2024-0005", ) severity.severities.create( diff --git a/vulnerabilities/tests/test_api_v3.py b/vulnerabilities/tests/test_api_v3.py index 36dd7fba1..137692abf 100644 --- a/vulnerabilities/tests/test_api_v3.py +++ b/vulnerabilities/tests/test_api_v3.py @@ -14,22 +14,26 @@ from rest_framework.test import APITestCase from univers.version_range import PypiVersionRange +from vulnerabilities.importer import AdvisoryDataV2 from vulnerabilities.models import AdvisoryV2 from vulnerabilities.models import PackageV2 from vulnerabilities.pipes.advisory import insert_advisory_v2 +from vulnerabilities.tests.pipelines import TestLogger class APIV3TestCase(APITestCase): def setUp(self): from vulnerabilities.models import ImpactedPackage - self.advisory = AdvisoryV2.objects.create( - datasource_id="ghsa", - advisory_id="GHSA-1234", - avid="ghsa/GHSA-1234", - unique_content_id="f" * 64, - url="https://example.com/advisory", - date_collected="2025-07-01T00:00:00Z", + self.logger = TestLogger() + self.advisory = insert_advisory_v2( + advisory=AdvisoryDataV2( + summary="summary", + advisory_id="GHSA-1234", + url="https://example.com/advisory", + ), + pipeline_id="ghsa", + logger=self.logger.write, ) 
self.package = PackageV2.objects.from_purl(purl="pkg:pypi/sample@1.0.0") diff --git a/vulnerabilities/tests/test_same_avid_different_content_id.py b/vulnerabilities/tests/test_same_avid_different_content_id.py index a366d1872..1dc6dd686 100644 --- a/vulnerabilities/tests/test_same_avid_different_content_id.py +++ b/vulnerabilities/tests/test_same_avid_different_content_id.py @@ -7,13 +7,14 @@ # See https://aboutcode.org for more information about nexB OSS projects. # -import uuid -from datetime import timedelta - import pytest -from django.utils.timezone import now +from vulnerabilities.importer import AdvisoryDataV2 from vulnerabilities.models import AdvisoryV2 +from vulnerabilities.pipes.advisory import insert_advisory_v2 +from vulnerabilities.tests.pipelines import TestLogger + +logger = TestLogger() @pytest.fixture @@ -22,45 +23,29 @@ def advisory_factory(db): Factory to create AdvisoryV2 objects with minimal required fields. """ - def _create(*, avid, advisory_id, collected_at): - return AdvisoryV2.objects.create( - datasource_id="test_source", - advisory_id=advisory_id, - avid=avid, - unique_content_id=str(uuid.uuid4()), - url="https://example.com/advisory", - date_collected=collected_at, + def _create(*, advisory_id, summary): + + return insert_advisory_v2( + advisory=AdvisoryDataV2( + summary=summary, + advisory_id=advisory_id, + url="https://example.com/advisory", + ), + pipeline_id="source", + logger=logger.write, ) return _create -@pytest.fixture -def timestamps(): - now_ts = now() - return { - "old": now_ts - timedelta(days=3), - "mid": now_ts - timedelta(days=1), - "new": now_ts, - } - - @pytest.mark.django_db def test_latest_for_avid_returns_latest_by_date_collected( - advisory_factory, timestamps, django_assert_num_queries + advisory_factory, django_assert_num_queries ): avid = "source/ADV-1" - older = advisory_factory( - avid=avid, - advisory_id="ADV-1", - collected_at=timestamps["old"], - ) - newer = advisory_factory( - avid=avid, - 
advisory_id="ADV-1", - collected_at=timestamps["new"], - ) + older = advisory_factory(advisory_id="ADV-1", summary="old advisory") + newer = advisory_factory(advisory_id="ADV-1", summary="new advisory") with django_assert_num_queries(1): result = AdvisoryV2.objects.latest_for_avid(avid) @@ -70,20 +55,11 @@ def test_latest_for_avid_returns_latest_by_date_collected( @pytest.mark.django_db -def test_latest_for_avid_tie_breaks_by_id(advisory_factory, timestamps, django_assert_num_queries): +def test_latest_for_avid_tie_breaks_by_id(advisory_factory, django_assert_num_queries): avid = "source/ADV-2" - ts = timestamps["mid"] - - first = advisory_factory( - avid=avid, - advisory_id="ADV-2", - collected_at=ts, - ) - second = advisory_factory( - avid=avid, - advisory_id="ADV-2", - collected_at=ts, - ) + + first = advisory_factory(advisory_id="ADV-2", summary="old advisory") + second = advisory_factory(advisory_id="ADV-2", summary="new advisory") with django_assert_num_queries(1): result = AdvisoryV2.objects.latest_for_avid(avid) @@ -92,25 +68,11 @@ def test_latest_for_avid_tie_breaks_by_id(advisory_factory, timestamps, django_a @pytest.mark.django_db -def test_latest_per_avid_returns_one_row_per_avid( - advisory_factory, timestamps, django_assert_num_queries -): - advisory_factory( - avid="source/A", - advisory_id="A", - collected_at=timestamps["old"], - ) - latest_a = advisory_factory( - avid="source/A", - advisory_id="A", - collected_at=timestamps["new"], - ) - - latest_b = advisory_factory( - avid="source/B", - advisory_id="B", - collected_at=timestamps["mid"], - ) +def test_latest_per_avid_returns_one_row_per_avid(advisory_factory, django_assert_num_queries): + advisory_factory(advisory_id="A", summary="old advisory") + latest_a = advisory_factory(advisory_id="A", summary="new advisory") + + latest_b = advisory_factory(advisory_id="B", summary="new advisory") with django_assert_num_queries(1): qs = AdvisoryV2.objects.latest_per_avid() @@ -122,19 +84,11 @@ def 
test_latest_per_avid_returns_one_row_per_avid( @pytest.mark.django_db -def test_latest_per_avid_excludes_older_versions(advisory_factory, timestamps): +def test_latest_per_avid_excludes_older_versions(advisory_factory): avid = "source/C" - older = advisory_factory( - avid=avid, - advisory_id="C", - collected_at=timestamps["old"], - ) - latest = advisory_factory( - avid=avid, - advisory_id="C", - collected_at=timestamps["new"], - ) + older = advisory_factory(advisory_id="C", summary="old advisory") + latest = advisory_factory(advisory_id="C", summary="new advisory") results = list(AdvisoryV2.objects.latest_per_avid()) @@ -144,30 +98,14 @@ def test_latest_per_avid_excludes_older_versions(advisory_factory, timestamps): @pytest.mark.django_db def test_latest_for_avids_filters_and_collapses_correctly( - advisory_factory, timestamps, django_assert_num_queries + advisory_factory, django_assert_num_queries ): - advisory_factory( - avid="source/A", - advisory_id="A", - collected_at=timestamps["old"], - ) - latest_a = advisory_factory( - avid="source/A", - advisory_id="A", - collected_at=timestamps["new"], - ) - - latest_b = advisory_factory( - avid="source/B", - advisory_id="B", - collected_at=timestamps["mid"], - ) - - advisory_factory( - avid="source/C", - advisory_id="C", - collected_at=timestamps["new"], - ) + + advisory_factory(advisory_id="A", summary="old advisory") + latest_a = advisory_factory(advisory_id="A", summary="new advisory") + + advisory_factory(advisory_id="B", summary="old advisory") + latest_b = advisory_factory(advisory_id="B", summary="new advisory") with django_assert_num_queries(1): qs = AdvisoryV2.objects.latest_for_avids({"source/A", "source/B"}) From 3d413f252052c00eee311e2dc3edfc9079958203 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 14 Apr 2026 17:40:59 +0530 Subject: [PATCH 538/545] Optimize V3 API Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 253 ++++++++++++++++++--------- vulnerabilities/models.py | 42 +++-- 
vulnerabilities/tests/test_api_v3.py | 2 +- 3 files changed, 202 insertions(+), 95 deletions(-) diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index c17202f25..a0cb24c91 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -7,6 +7,7 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +from collections import defaultdict from typing import List from urllib.parse import urlencode @@ -21,6 +22,7 @@ from rest_framework.reverse import reverse from rest_framework.throttling import AnonRateThrottle +from vulnerabilities.models import AdvisoryAlias from vulnerabilities.models import AdvisoryReference from vulnerabilities.models import AdvisorySet from vulnerabilities.models import AdvisorySetMember @@ -216,6 +218,26 @@ def get_fixing_vulnerabilities_url(self, obj): def get_affected_by_vulnerabilities(self, package): """Return a dictionary with advisory as keys and their details, including fixed_by_packages.""" + advisories = self.context["advisory_map"].get(package.id, []) + impact_map = self.context["impact_map"].get(package.id, {}) + + if advisories: + result = [] + + for adv in advisories: + fixed = impact_map.get(adv["avid"]) + if not fixed: + continue + + result.append( + { + **adv, + "fixed_by_packages": fixed, + } + ) + + return result + advisories_qs = AdvisoryV2.objects.latest_affecting_advisories_for_purl(package.package_url) advisories = [] @@ -250,56 +272,35 @@ def get_affected_by_vulnerabilities(self, package): "advisory_id": advisory.advisory_id.split("/")[-1], "aliases": [alias.alias for alias in advisory.aliases.all()], "summary": advisory.summary, - "fixed_by_packages": [pkg.purl for pkg in impact.fixed_by_packages.all()], "severity": advisory.weighted_severity, "exploitability": advisory.exploitability, "risk_score": advisory.risk_score, + "fixed_by_packages": [pkg.purl for pkg in impact.fixed_by_packages.all()], } ) return result - is_grouped = AdvisorySet.objects.filter(package=package, 
relation_type="affecting").exists() - - if is_grouped: - affected_by_advisories_qs = ( - AdvisorySet.objects.filter(package=package, relation_type="affecting") - .select_related("primary_advisory") - .prefetch_related( - Prefetch( - "members", - queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - "advisory" - ), - to_attr="secondary_members", - ) + if not advisories: + if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: + advisories_qs = advisories_qs.prefetch_related( + "aliases", + "impacted_packages__affecting_packages", + "impacted_packages__fixed_by_packages", ) - ) - - affected_groups = [ - Group( - aliases=list(adv.aliases.all()), - primary=adv.primary_advisory, - secondaries=[member.advisory for member in adv.secondary_members], + advisories: List[GroupedAdvisory] = merge_and_save_grouped_advisories( + package, advisories_qs, "affecting" ) - for adv in affected_by_advisories_qs - ] + return self.return_advisories_data(package, advisories_qs, advisories) - advisories: List[GroupedAdvisory] = get_advisories_from_groups(affected_groups) - return self.return_advisories_data(package, advisories_qs, advisories) + def get_fixing_vulnerabilities(self, package): + fixing_advisories = AdvisorySet.objects.filter( + package=package, relation_type="fixing" + ).values_list("primary_advisory__advisory_id", flat=True) - if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: - advisories_qs = advisories_qs.prefetch_related( - "aliases", - "impacted_packages__affecting_packages", - "impacted_packages__fixed_by_packages", - ) - advisories: List[GroupedAdvisory] = merge_and_save_grouped_advisories( - package, advisories_qs, "affecting" - ) - return self.return_advisories_data(package, advisories_qs, advisories) + if fixing_advisories: + return [{"advisory_id": adv_id.split("/")[-1]} for adv_id in fixing_advisories] - def get_fixing_vulnerabilities(self, package): advisories_qs = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl(package.package_url) if 
not package.type in TYPES_WITH_MULTIPLE_IMPORTERS: @@ -319,37 +320,6 @@ def get_fixing_vulnerabilities(self, package): ) return results - advisories = [] - - is_grouped = AdvisorySet.objects.filter(package=package, relation_type="fixing").exists() - - if is_grouped: - fixing_advisories_qs = ( - AdvisorySet.objects.filter(package=package, relation_type="fixing") - .select_related("primary_advisory") - .prefetch_related( - Prefetch( - "members", - queryset=AdvisorySetMember.objects.filter(is_primary=False).select_related( - "advisory" - ), - to_attr="secondary_members", - ) - ) - ) - - fixing_groups = [ - Group( - aliases=list(adv.aliases.all()), - primary=adv.primary_advisory, - secondaries=[member.advisory for member in adv.secondary_members], - ) - for adv in fixing_advisories_qs - ] - - advisories: List[GroupedAdvisory] = get_advisories_from_groups(fixing_groups) - return self.return_fixing_advisories_data(advisories) - if package.type in TYPES_WITH_MULTIPLE_IMPORTERS: advisories_qs = advisories_qs.prefetch_related( "aliases", @@ -409,11 +379,11 @@ def return_advisories_data(self, package, advisories_qs, advisories): return result def get_next_non_vulnerable_version(self, package): - if next_non_vulnerable := package.get_non_vulnerable_versions()[0]: + if next_non_vulnerable := package.next_non_vulnerable_version: return next_non_vulnerable.version def get_latest_non_vulnerable_version(self, package): - if latest_non_vulnerable := package.get_non_vulnerable_versions()[-1]: + if latest_non_vulnerable := package.latest_non_vulnerable_version: return latest_non_vulnerable.version @@ -464,13 +434,11 @@ def create(self, request, *args, **kwargs): query = ( PackageV2.objects.filter(plain_package_url__in=plain_purls) .values_list("plain_package_url", flat=True) - .distinct() .order_by("plain_package_url") ) else: query = ( PackageV2.objects.filter(package_url__in=purls) - .distinct() .order_by("package_url") .values_list("package_url", flat=True) ) @@ -479,20 +447,20 @@ 
def create(self, request, *args, **kwargs): return self.get_paginated_response(page) if ignore_qualifiers_subpath: - query = ( - PackageV2.objects.filter(plain_package_url__in=plain_purls) - .order_by("plain_package_url") - .distinct("plain_package_url") + query = PackageV2.objects.filter(plain_package_url__in=plain_purls).order_by( + "plain_package_url" ) else: - query = ( - PackageV2.objects.filter(package_url__in=purls) - .order_by("package_url") - .distinct("package_url") - ) + query = PackageV2.objects.filter(package_url__in=purls).order_by("package_url") page = self.paginate_queryset(query) - serializer = self.get_serializer(page, many=True, context={"request": request}) + advisory_map = get_grouped_advisories_bulk(page) + impact_map = get_impacts_bulk(page) + serializer = self.get_serializer( + page, + many=True, + context={"request": request, "advisory_map": advisory_map, "impact_map": impact_map}, + ) return self.get_paginated_response(serializer.data) @@ -592,3 +560,124 @@ class FixingAdvisoriesViewSet(PackageAdvisoriesViewSet): class AffectedByAdvisoriesViewSet(PackageAdvisoriesViewSet): relation = "impacted_packages__affecting_packages__package_url" serializer_class = AffectedByAdvisoryV3Serializer + + +def get_grouped_advisories_bulk(packages): + package_ids = [p.id for p in packages] + + advisory_sets = list( + AdvisorySet.objects.filter( + package_id__in=package_ids, + relation_type="affecting", + ) + .select_related("primary_advisory", "package") + .prefetch_related( + Prefetch("aliases", queryset=AdvisoryAlias.objects.only("alias")), + Prefetch( + "members", + queryset=AdvisorySetMember.objects.filter(is_primary=False) + .select_related("advisory") + .only( + "advisory__avid", + "advisory__weighted_severity", + "advisory__exploitability", + ), + to_attr="secondary_members", + ), + ) + .only( + "id", + "package_id", + "primary_advisory__avid", + "primary_advisory__summary", + "primary_advisory__weighted_severity", + 
"primary_advisory__exploitability", + "primary_advisory__advisory_id", + ) + ) + + package_map = defaultdict(list) + for adv in advisory_sets: + adv._aliases_cache = [a.alias for a in adv.aliases.all()] + package_map[adv.package_id].append(adv) + + result = {} + + for package in packages: + groups = package_map.get(package.id, []) + grouped = [] + + for adv in groups: + primary = adv.primary_advisory + secondaries = [m.advisory for m in adv.secondary_members] + + max_sev = primary.weighted_severity or 0.0 + max_exp = primary.exploitability or 0.0 + + for sec in secondaries: + if sec.weighted_severity: + max_sev = max(max_sev, sec.weighted_severity) + if sec.exploitability: + max_exp = max(max_exp, sec.exploitability) + + weighted_severity = round(max_sev, 1) if max_sev else None + exploitability = max_exp or None + + risk_score = None + if exploitability and weighted_severity: + risk_score = round(min(exploitability * weighted_severity, 10.0), 1) + + identifier = primary.advisory_id.split("/")[-1] + + aliases = [a for a in adv._aliases_cache if a != identifier] + + grouped.append( + { + "avid": primary.avid, + "advisory_id": identifier, + "aliases": aliases, + "weighted_severity": weighted_severity, + "exploitability": exploitability, + "risk_score": risk_score, + "summary": primary.summary, + } + ) + + result[package.id] = grouped + + return result + + +def get_impacts_bulk(packages): + package_ids = [p.id for p in packages] + + impacts = ( + ImpactedPackageAffecting.objects.filter(package_id__in=package_ids) + .select_related("impacted_package__advisory") + .prefetch_related( + Prefetch( + "impacted_package__fixed_by_packages", + queryset=PackageV2.objects.only("package_url"), + ) + ) + .only( + "package_id", + "impacted_package_id", + "impacted_package__advisory_id", + "impacted_package__advisory__avid", + ) + ) + + impact_map = defaultdict(dict) + fixed_cache = {} + + for impact in impacts: + ip = impact.impacted_package + avid = ip.advisory.avid + + if ip.id 
not in fixed_cache: + fixed_cache[ip.id] = list({pkg.purl for pkg in ip.fixed_by_packages.all()}) + + impact_map[impact.package_id][avid] = fixed_cache[ip.id] + + return impact_map diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 896da7c76..f7b6f75ee 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -2907,6 +2907,13 @@ def latest_affecting_advisories_for_purls(self, purls): ) return self.filter(id__in=Subquery(adv_ids)).latest_per_avid() + def latest_affecting_advisories_for_packages(self, purls): + adv_ids = ImpactedPackageAffecting.objects.filter(package__in=purls).values_list( + "impacted_package__advisory_id", + flat=True, + ) + return self.filter(id__in=Subquery(adv_ids)).latest_per_avid() + def latest_fixed_by_advisories_for_purl(self, purl): adv_ids = ImpactedPackageFixedBy.objects.filter(package__package_url=purl).values_list( "impacted_package__advisory_id", @@ -3577,25 +3584,36 @@ def calculate_version_rank(self): PackageV2.objects.bulk_update(sorted_packages, fields=["version_rank"]) return self.version_rank - def get_non_vulnerable_versions(self): + @cached_property + def _non_vulnerable_versions(self): """ - Return a tuple of the next and latest non-vulnerable versions as Package instance. - Return a tuple of (None, None) if there is no non-vulnerable version. + Cached computation to avoid duplicate queries. 
+ Returns (next, latest) """ if self.version_rank == 0: self.calculate_version_rank - non_vulnerable_versions = PackageV2.objects.get_fixed_by_package_versions( - self, fix=False - ).only_non_vulnerable() - later_non_vulnerable = non_vulnerable_versions.filter( - version_rank__gte=self.version_rank - ).order_by("version_rank") + qs = ( + PackageV2.objects.get_fixed_by_package_versions(self, fix=False) + .only_non_vulnerable() + .filter(version_rank__gt=self.version_rank) + .order_by("version_rank") + ) - if later_non_vulnerable.exists(): - return later_non_vulnerable.first(), later_non_vulnerable.last() + next_non_vulnerable = qs.first() + latest_non_vulnerable = qs.last() - return None, None + return next_non_vulnerable, latest_non_vulnerable + + @property + def next_non_vulnerable_version(self): + next_nv, _ = self._non_vulnerable_versions + return next_nv if next_nv else None + + @property + def latest_non_vulnerable_version(self): + _, latest_nv = self._non_vulnerable_versions + return latest_nv if latest_nv else None @cached_property def version_class(self): diff --git a/vulnerabilities/tests/test_api_v3.py b/vulnerabilities/tests/test_api_v3.py index 137692abf..84e1cf94c 100644 --- a/vulnerabilities/tests/test_api_v3.py +++ b/vulnerabilities/tests/test_api_v3.py @@ -66,7 +66,7 @@ def test_packages_post_without_details(self): def test_packages_post_with_details(self): url = reverse("package-v3-list") - with self.assertNumQueries(33): + with self.assertNumQueries(34): response = self.client.post( url, data={ From c9552b433ce28afd4868470a4f360b7f2114f4d8 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 14 Apr 2026 18:31:47 +0530 Subject: [PATCH 539/545] Optimize get non vulnerable versions Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 97 ++++++++++++++++++++++++--------------- vulnerabilities/models.py | 6 +-- vulnerabilities/views.py | 2 +- 3 files changed, 63 insertions(+), 42 deletions(-) diff --git a/vulnerabilities/api_v3.py 
b/vulnerabilities/api_v3.py index a0cb24c91..ee62ea1d5 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -12,6 +12,7 @@ from urllib.parse import urlencode from django.db.models import Exists +from django.db.models import Max from django.db.models import OuterRef from django.db.models import Prefetch from django_filters import rest_framework as filters @@ -226,8 +227,7 @@ def get_affected_by_vulnerabilities(self, package): for adv in advisories: fixed = impact_map.get(adv["avid"]) - if not fixed: - continue + adv.pop("avid", None) result.append( { @@ -294,12 +294,9 @@ def get_affected_by_vulnerabilities(self, package): return self.return_advisories_data(package, advisories_qs, advisories) def get_fixing_vulnerabilities(self, package): - fixing_advisories = AdvisorySet.objects.filter( - package=package, relation_type="fixing" - ).values_list("primary_advisory__advisory_id", flat=True) - - if fixing_advisories: - return [{"advisory_id": adv_id.split("/")[-1]} for adv_id in fixing_advisories] + advisories = self.context["fixing_advisory_map"].get(package.id, []) + if advisories: + return advisories advisories_qs = AdvisoryV2.objects.latest_fixed_by_advisories_for_purl(package.package_url) @@ -326,6 +323,8 @@ def get_fixing_vulnerabilities(self, package): "impacted_packages__affecting_packages", "impacted_packages__fixed_by_packages", ) + if not advisories_qs.exists(): + return [] advisories: List[GroupedAdvisory] = merge_and_save_grouped_advisories( package, advisories_qs, "fixing" ) @@ -454,12 +453,13 @@ def create(self, request, *args, **kwargs): query = PackageV2.objects.filter(package_url__in=purls).order_by("package_url") page = self.paginate_queryset(query) - advisory_map = get_grouped_advisories_bulk(page) + affected_advisory_map = get_affected_advisories_bulk(page) + fixing_advisory_map = get_fixing_advisories_bulk(page) impact_map = get_impacts_bulk(page) serializer = self.get_serializer( page, many=True, - context={"request": request, 
"advisory_map": advisory_map, "impact_map": impact_map}, + context={"request": request, "advisory_map": affected_advisory_map, "impact_map": impact_map, "fixing_advisory_map": fixing_advisory_map}, ) return self.get_paginated_response(serializer.data) @@ -562,7 +562,7 @@ class AffectedByAdvisoriesViewSet(PackageAdvisoriesViewSet): serializer_class = AffectedByAdvisoryV3Serializer -def get_grouped_advisories_bulk(packages): +def get_affected_advisories_bulk(packages): package_ids = [p.id for p in packages] advisory_sets = list( @@ -570,19 +570,14 @@ def get_grouped_advisories_bulk(packages): package_id__in=package_ids, relation_type="affecting", ) - .select_related("primary_advisory", "package") - .prefetch_related( - Prefetch("aliases", queryset=AdvisoryAlias.objects.only("alias")), - Prefetch( - "members", - queryset=AdvisorySetMember.objects.filter(is_primary=False) - .select_related("advisory") - .only( - "advisory__avid", - "advisory__weighted_severity", - "advisory__exploitability", - ), - to_attr="secondary_members", + .select_related("primary_advisory") + .prefetch_related(Prefetch("aliases", queryset=AdvisoryAlias.objects.only("alias"))) + .annotate( + max_severity=Max( + "members__advisory__weighted_severity", + ), + max_exploitability=Max( + "members__advisory__exploitability", ), ) .only( @@ -590,13 +585,12 @@ def get_grouped_advisories_bulk(packages): "package_id", "primary_advisory__avid", "primary_advisory__summary", - "primary_advisory__weighted_severity", - "primary_advisory__exploitability", "primary_advisory__advisory_id", ) ) package_map = defaultdict(list) + for adv in advisory_sets: adv._aliases_cache = [a.alias for a in adv.aliases.all()] package_map[adv.package_id].append(adv) @@ -609,23 +603,14 @@ def get_grouped_advisories_bulk(packages): for adv in groups: primary = adv.primary_advisory - secondaries = [m.advisory for m in adv.secondary_members] - max_sev = primary.weighted_severity or 0.0 - max_exp = primary.exploitability or 0.0 - - for 
sec in secondaries: - if sec.weighted_severity: - max_sev = max(max_sev, sec.weighted_severity) - if sec.exploitability: - max_exp = max(max_exp, sec.exploitability) + max_sev = adv.max_severity or 0.0 + max_exp = adv.max_exploitability or 0.0 weighted_severity = round(max_sev, 1) if max_sev else None exploitability = max_exp or None - risk_score = None - if exploitability and weighted_severity: - risk_score = round(min(exploitability * weighted_severity, 10.0), 1) + risk_score = round(min(max_exp * max_sev, 10.0), 1) if max_exp and max_sev else None identifier = primary.advisory_id.split("/")[-1] @@ -681,3 +666,39 @@ def get_impacts_bulk(packages): impact_map[impact.package_id][avid] = fixed_cache[ip.id] return impact_map + + +def get_fixing_advisories_bulk(packages): + package_ids = [p.id for p in packages] + + advisory_sets = list( + AdvisorySet.objects.filter( + package_id__in=package_ids, + relation_type="fixing", + ) + .only( + "id", + "package_id", + "primary_advisory__advisory_id", + ) + ) + + package_map = defaultdict(list) + + for adv in advisory_sets: + package_map[adv.package_id].append(adv.primary_advisory.advisory_id) + + result = {} + + for package in packages: + groups = package_map.get(package.id, []) + grouped = [] + + for adv_id in groups: + grouped.append( + {"advisory_id": adv_id.split("/")[-1]} + ) + + result[package.id] = grouped + + return result diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index f7b6f75ee..4b6c17627 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -3585,7 +3585,7 @@ def calculate_version_rank(self): return self.version_rank @cached_property - def _non_vulnerable_versions(self): + def get_non_vulnerable_versions(self): """ Cached computation to avoid duplicate queries. 
Returns (next, latest) @@ -3607,12 +3607,12 @@ def _non_vulnerable_versions(self): @property def next_non_vulnerable_version(self): - next_nv, _ = self._non_vulnerable_versions + next_nv, _ = self.get_non_vulnerable_versions return next_nv if next_nv else None @property def latest_non_vulnerable_version(self): - _, latest_nv = self._non_vulnerable_versions + _, latest_nv = self.get_non_vulnerable_versions return latest_nv if latest_nv else None @cached_property diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 5b9406f87..371dcd217 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -257,7 +257,7 @@ def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) package = self.object - next_non_vulnerable, latest_non_vulnerable = package.get_non_vulnerable_versions() + next_non_vulnerable, latest_non_vulnerable = package.get_non_vulnerable_versions context["package"] = package context["next_non_vulnerable"] = next_non_vulnerable From d83fce384e6de4103d7e45970007ac306099a447 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Tue, 14 Apr 2026 18:33:05 +0530 Subject: [PATCH 540/545] Fix tests Signed-off-by: Tushar Goel --- vulnerabilities/api_v3.py | 14 ++++++++------ vulnerabilities/tests/test_api_v3.py | 2 +- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/vulnerabilities/api_v3.py b/vulnerabilities/api_v3.py index ee62ea1d5..12f10ed1c 100644 --- a/vulnerabilities/api_v3.py +++ b/vulnerabilities/api_v3.py @@ -459,7 +459,12 @@ def create(self, request, *args, **kwargs): serializer = self.get_serializer( page, many=True, - context={"request": request, "advisory_map": affected_advisory_map, "impact_map": impact_map, "fixing_advisory_map": fixing_advisory_map}, + context={ + "request": request, + "advisory_map": affected_advisory_map, + "impact_map": impact_map, + "fixing_advisory_map": fixing_advisory_map, + }, ) return self.get_paginated_response(serializer.data) @@ -675,8 +680,7 @@ def 
get_fixing_advisories_bulk(packages): AdvisorySet.objects.filter( package_id__in=package_ids, relation_type="fixing", - ) - .only( + ).only( "id", "package_id", "primary_advisory__advisory_id", @@ -695,9 +699,7 @@ def get_fixing_advisories_bulk(packages): grouped = [] for adv_id in groups: - grouped.append( - {"advisory_id": adv_id.split("/")[-1]} - ) + grouped.append({"advisory_id": adv_id.split("/")[-1]}) result[package.id] = grouped diff --git a/vulnerabilities/tests/test_api_v3.py b/vulnerabilities/tests/test_api_v3.py index 84e1cf94c..be4b1d923 100644 --- a/vulnerabilities/tests/test_api_v3.py +++ b/vulnerabilities/tests/test_api_v3.py @@ -66,7 +66,7 @@ def test_packages_post_without_details(self): def test_packages_post_with_details(self): url = reverse("package-v3-list") - with self.assertNumQueries(34): + with self.assertNumQueries(31): response = self.client.post( url, data={ From bb7d3a21844ba969281b1263ae6a1ebebaeb1b62 Mon Sep 17 00:00:00 2001 From: Keshav Priyadarshi Date: Wed, 15 Apr 2026 00:10:11 +0530 Subject: [PATCH 541/545] fix: use is_latest field to get latest advisory for an avid Signed-off-by: Keshav Priyadarshi --- vulnerabilities/models.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 896da7c76..6ce9f29df 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -2876,14 +2876,7 @@ def to_dict(self): class AdvisoryV2QuerySet(BaseQuerySet): def latest_for_avid(self, avid: str): - return ( - self.filter(avid=avid) - .order_by( - F("date_collected").desc(nulls_last=True), - "-id", - ) - .first() - ) + return self.get(avid=avid, is_latest=True) def latest_per_avid(self): return self.filter(is_latest=True) From 34969c40c833b9eed98f42fc358235c56314f9cd Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 15 Apr 2026 17:31:56 +0530 Subject: [PATCH 542/545] make package_url field unique for PackageV2 Signed-off-by: Tushar Goel --- CHANGELOG.rst | 
5 ++ .../migrations/0122_auto_20260415_1155.py | 36 +++++++++++ ...ns_alter_packagev2_package_url_and_more.py | 60 +++++++++++++++++++ vulnerabilities/models.py | 19 ++++++ 4 files changed, 120 insertions(+) create mode 100644 vulnerabilities/migrations/0122_auto_20260415_1155.py create mode 100644 vulnerabilities/migrations/0123_alter_packagev2_options_alter_packagev2_package_url_and_more.py diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 85d04ed9c..eef4ddfee 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,11 @@ Release notes ============= +Version v38.5.0 +--------------------- + +- fix: Make package_url field unique for PackageV2 + Version v38.4.0 --------------------- diff --git a/vulnerabilities/migrations/0122_auto_20260415_1155.py b/vulnerabilities/migrations/0122_auto_20260415_1155.py new file mode 100644 index 000000000..0f9463302 --- /dev/null +++ b/vulnerabilities/migrations/0122_auto_20260415_1155.py @@ -0,0 +1,36 @@ +from django.db import migrations +from django.db.models import F, Window +from django.db.models.functions import RowNumber + + +def remove_duplicate_package_urls(apps, schema_editor): + PackageV2 = apps.get_model("vulnerabilities", "PackageV2") + + duplicates = ( + PackageV2.objects + .annotate( + rn=Window( + expression=RowNumber(), + partition_by=[F("package_url")], + order_by=F("id").desc(), + ) + ) + .filter(rn__gt=1) + ) + + BATCH_SIZE = 1000 + ids = list(duplicates.values_list("id", flat=True)) + + for i in range(0, len(ids), BATCH_SIZE): + PackageV2.objects.filter(id__in=ids[i:i+BATCH_SIZE]).delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0121_advisoryv2_is_latest_alter_advisoryv2_advisory_id_and_more"), + ] + + operations = [ + migrations.RunPython(remove_duplicate_package_urls, migrations.RunPython.noop), + ] \ No newline at end of file diff --git a/vulnerabilities/migrations/0123_alter_packagev2_options_alter_packagev2_package_url_and_more.py 
b/vulnerabilities/migrations/0123_alter_packagev2_options_alter_packagev2_package_url_and_more.py new file mode 100644 index 000000000..6183a363e --- /dev/null +++ b/vulnerabilities/migrations/0123_alter_packagev2_options_alter_packagev2_package_url_and_more.py @@ -0,0 +1,60 @@ +# Generated by Django 5.2.11 on 2026-04-15 11:59 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0122_auto_20260415_1155"), + ] + + operations = [ + migrations.AlterModelOptions( + name="packagev2", + options={ + "ordering": [ + "type", + "namespace", + "name", + "version_rank", + "version", + "qualifiers", + "subpath", + ] + }, + ), + migrations.AlterField( + model_name="packagev2", + name="package_url", + field=models.CharField( + db_index=True, + help_text="The Package URL for this package.", + max_length=1000, + unique=True, + ), + ), + migrations.AlterUniqueTogether( + name="packagev2", + unique_together={("type", "namespace", "name", "version", "qualifiers", "subpath")}, + ), + migrations.AddIndex( + model_name="packagev2", + index=models.Index( + fields=["type", "namespace", "name"], name="vulnerabili_type_ca0efc_idx" + ), + ), + migrations.AddIndex( + model_name="packagev2", + index=models.Index( + fields=["type", "namespace", "name", "qualifiers", "subpath"], + name="vulnerabili_type_c98c98_idx", + ), + ), + migrations.AddIndex( + model_name="packagev2", + index=models.Index( + fields=["type", "namespace", "name", "version"], name="vulnerabili_type_1af1cc_idx" + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 11f4ad61e..8a91be454 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -3490,6 +3490,7 @@ class PackageV2(PackageURLMixin): null=False, help_text="The Package URL for this package.", db_index=True, + unique=True ) plain_package_url = models.CharField( @@ -3520,6 +3521,24 @@ class PackageV2(PackageURLMixin): db_index=True, ) + class 
Meta: + unique_together = ["type", "namespace", "name", "version", "qualifiers", "subpath"] + ordering = ["type", "namespace", "name", "version_rank", "version", "qualifiers", "subpath"] + indexes = [ + # Index for getting al versions of a package + models.Index(fields=["type", "namespace", "name"]), + models.Index(fields=["type", "namespace", "name", "qualifiers", "subpath"]), + # Index for getting a specific version of a package + models.Index( + fields=[ + "type", + "namespace", + "name", + "version", + ] + ), + ] + def __str__(self): return self.package_url From ed06995e656b26a42e4e0c6856e490e95b3fa0fe Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Wed, 15 Apr 2026 19:36:23 +0530 Subject: [PATCH 543/545] Fix errors Signed-off-by: Tushar Goel --- vulnerabilities/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 8a91be454..c874db4e4 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -3490,7 +3490,7 @@ class PackageV2(PackageURLMixin): null=False, help_text="The Package URL for this package.", db_index=True, - unique=True + unique=True, ) plain_package_url = models.CharField( From 660375a893dbd83bf54a6659dbf1755f0d02e2eb Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 24 Apr 2026 17:38:40 +0530 Subject: [PATCH 544/545] Fix models Signed-off-by: Tushar Goel --- vulnerabilities/models.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index c874db4e4..4efc04766 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -3477,7 +3477,8 @@ def from_purl(self, purl: Union[PackageURL, str]): """ Return a new Package given a ``purl`` PackageURL object or PURL string. 
""" - return PackageV2.objects.create(**purl_to_dict(purl=purl)) + package, _ = PackageV2.objects.get_or_create(**purl_to_dict(purl=purl)) + return package class PackageV2(PackageURLMixin): From 0e24e73cf2a9abd52e7d31ebd6e456a1dae3f348 Mon Sep 17 00:00:00 2001 From: Tushar Goel Date: Fri, 24 Apr 2026 17:46:38 +0530 Subject: [PATCH 545/545] Bump version Signed-off-by: Tushar Goel --- setup.cfg | 2 +- vulnerablecode/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index e1275dae2..71d62d573 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = vulnerablecode -version = 38.4.0 +version = 38.5.0 license = Apache-2.0 AND CC-BY-SA-4.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 diff --git a/vulnerablecode/__init__.py b/vulnerablecode/__init__.py index f8263d4c5..15d335f6a 100644 --- a/vulnerablecode/__init__.py +++ b/vulnerablecode/__init__.py @@ -14,7 +14,7 @@ import git -__version__ = "38.4.0" +__version__ = "38.5.0" PROJECT_DIR = Path(__file__).resolve().parent