From 949c18c17d2c99c2971cad613c2b7fe9981a2940 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 20 Aug 2024 15:19:18 +0300 Subject: [PATCH 01/16] Migrate ( metasploit, exploit-db, kev ) to aboutcode pipeline. Set data_source as the header for the exploit table. Squash the migration files into a single file. Add test for exploit-db , metasploit Add a missing migration file Rename resources_and_notes to notes Fix Api test Refactor metasploit , exploitdb , kev improver Rename Kev tab to exploit tab Add support for exploitdb , metasploit, kev Signed-off-by: ziadhany Signed-off-by: ziad hany --- .../migrations/0063_exploit_delete_kev.py | 131 ++++++++++++++++++ vulnerabilities/pipelines/exploitdb.py | 95 +++++++++++++ vulnerabilities/pipelines/metasploit.py | 78 +++++++++++ .../pipelines/vulnerability_kev.py | 69 +++++++++ 4 files changed, 373 insertions(+) create mode 100644 vulnerabilities/migrations/0063_exploit_delete_kev.py create mode 100644 vulnerabilities/pipelines/exploitdb.py create mode 100644 vulnerabilities/pipelines/metasploit.py create mode 100644 vulnerabilities/pipelines/vulnerability_kev.py diff --git a/vulnerabilities/migrations/0063_exploit_delete_kev.py b/vulnerabilities/migrations/0063_exploit_delete_kev.py new file mode 100644 index 000000000..00d2d60fe --- /dev/null +++ b/vulnerabilities/migrations/0063_exploit_delete_kev.py @@ -0,0 +1,131 @@ +# Generated by Django 4.1.13 on 2024-09-10 18:40 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0062_package_is_ghost"), + ] + + operations = [ + migrations.CreateModel( + name="Exploit", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "date_added", + models.DateField( + blank=True, + help_text="The date the vulnerability was added to an exploit catalog.", + null=True, + ), + ), + ( + "description", + models.TextField( + blank=True, + help_text="Description of the vulnerability in an exploit catalog, often a refinement of the original CVE description", + null=True, + ), + ), + ( + "required_action", + models.TextField( + blank=True, + help_text="The required action to address the vulnerability, typically to apply vendor updates or apply vendor mitigations or to discontinue use.", + null=True, + ), + ), + ( + "due_date", + models.DateField( + blank=True, + help_text="The date the required action is due, which applies to all USA federal civilian executive branch (FCEB) agencies, but all organizations are strongly encouraged to execute the required action", + null=True, + ), + ), + ( + "notes", + models.TextField( + blank=True, + help_text="Additional notes and resources about the vulnerability, often a URL to vendor instructions.", + null=True, + ), + ), + ( + "known_ransomware_campaign_use", + models.BooleanField( + default=False, + help_text="Known' if this vulnerability is known to have been leveraged as part of a ransomware campaign; \n or 'Unknown' if there is no confirmation that the vulnerability has been utilized for ransomware.", + ), + ), + ( + "source_date_published", + models.DateField( + blank=True, + help_text="The date that the exploit was published or disclosed.", + null=True, + ), + ), + ( + "exploit_type", + models.TextField( + blank=True, + help_text="The type of the exploit as provided by the original upstream data source.", + null=True, + ), + ), + ( + "platform", + models.TextField( + blank=True, + 
help_text="The platform associated with the exploit as provided by the original upstream data source.", + null=True, + ), + ), + ( + "source_date_updated", + models.DateField( + blank=True, + help_text="The date the exploit was updated in the original upstream data source.", + null=True, + ), + ), + ( + "data_source", + models.TextField( + blank=True, + help_text="The source of the exploit information, such as CISA KEV, exploitdb, metaspoit, or others.", + null=True, + ), + ), + ( + "source_url", + models.URLField( + blank=True, + help_text="The URL to the exploit as provided in the original upstream data source.", + null=True, + ), + ), + ( + "vulnerability", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="exploits", + to="vulnerabilities.vulnerability", + ), + ), + ], + ), + migrations.DeleteModel( + name="Kev", + ), + ] diff --git a/vulnerabilities/pipelines/exploitdb.py b/vulnerabilities/pipelines/exploitdb.py new file mode 100644 index 000000000..0c3bdc458 --- /dev/null +++ b/vulnerabilities/pipelines/exploitdb.py @@ -0,0 +1,95 @@ +import csv +import io +import logging + +import requests + +from vulnerabilities.models import Alias +from vulnerabilities.models import Exploit +from vulnerabilities.models import VulnerabilityReference +from vulnerabilities.models import VulnerabilityRelatedReference +from vulnerabilities.pipelines import VulnerableCodePipeline + +logger = logging.getLogger(__name__) + + +class ExploitDBImproverPipeline(VulnerableCodePipeline): + """ + ExploitDB Improver Pipeline: Fetch ExploitDB data, iterate over it to find the vulnerability with + the specified alias, and create or update the ref and ref-type accordingly. + """ + + exploit_data = None + + license_expression = "GPL-2.0" + + @classmethod + def steps(cls): + return ( + cls.fetch_exploits, + cls.add_exploit, + ) + + def fetch_exploits(self): + exploit_db_url = ( + "https://gitlab.com/exploit-database/exploitdb/-/raw/main/files_exploits.csv" + ) + response = requests.get(exploit_db_url) + self.exploit_data = io.StringIO(response.text) + + def add_exploit(self): + csvreader = csv.reader(self.exploit_data) + + header = next(csvreader) + for row in csvreader: + + aliases = row[11].split(";") + + for raw_alias in aliases: + + alias = Alias.objects.get_or_none(alias=raw_alias) + if not alias: + continue + + vul = alias.vulnerability + if not vul: + continue + + self.add_exploit_references(row[11], row[16], row[1], vul) + + Exploit.objects.update_or_create( + vulnerability=vul, + data_source="Exploit-DB", + defaults={ + "date_added": row[header.index("date_added")], + "description": row[header.index("description")], + "known_ransomware_campaign_use": row[header.index("verified")], + "source_date_published": row[header.index("date_published")], + "exploit_type": row[header.index("type")], + "platform": row[header.index("platform")], + "source_date_updated": row[header.index("date_updated")], + "source_url": row[header.index("source_url")], + }, + ) + + def add_exploit_references(self, ref_id, direct_url, path, vul): + url_map = { + "file_url": f"https://gitlab.com/exploit-database/exploitdb/-/blob/main/{path}", + "direct_url": direct_url, + } + + for key, url in url_map.items(): + if url: + ref, created = VulnerabilityReference.objects.update_or_create( + url=url, + defaults={ + "reference_id": ref_id, + "reference_type": VulnerabilityReference.EXPLOIT, + }, + ) + + if created: + VulnerabilityRelatedReference.objects.get_or_create( + vulnerability=vul, + reference=ref, + ) diff 
--git a/vulnerabilities/pipelines/metasploit.py b/vulnerabilities/pipelines/metasploit.py new file mode 100644 index 000000000..be1829ede --- /dev/null +++ b/vulnerabilities/pipelines/metasploit.py @@ -0,0 +1,78 @@ +import logging + +import requests +import saneyaml + +from vulnerabilities.models import Alias +from vulnerabilities.models import Exploit +from vulnerabilities.pipelines import VulnerableCodePipeline + +module_logger = logging.getLogger(__name__) + + +class MetasploitImproverPipeline(VulnerableCodePipeline): + """ + Metasploit Exploits Pipeline: Retrieve Metasploit data, iterate through it to identify vulnerabilities + by their associated aliases, and create or update the corresponding Exploit instances. + """ + + metasploit_data = {} + + @classmethod + def steps(cls): + return ( + cls.fetch_exploits, + cls.add_exploits, + ) + + def fetch_exploits(self): + url = "https://raw.githubusercontent.com/rapid7/metasploit-framework/master/db/modules_metadata_base.json" + response = requests.get(url) + if response.status_code != 200: + self.log(f"Failed to fetch the Metasploit Exploits: {url}") + return + self.metasploit_data = response.json() + + def add_exploits(self): + for _, record in self.metasploit_data.items(): + vul = None + for ref in record.get("references", []): + if ref.startswith("OSVDB") or ref.startswith("URL-"): + # ignore OSV-DB and reference exploit for metasploit + continue + + if not vul: + try: + alias = Alias.objects.get(alias=ref) + except Alias.DoesNotExist: + continue + + if not alias.vulnerability: + continue + + vul = alias.vulnerability + + if not vul: + continue + + description = record.get("description", "") + notes = record.get("notes", {}) + source_date_published = record.get("disclosure_date") + platform = record.get("platform") + + path = record.get("path") + source_url = ( + f"https://github.com/rapid7/metasploit-framework/tree/master{path}" if path else "" + ) + + Exploit.objects.update_or_create( + vulnerability=vul, + data_source="Metasploit", + defaults={ + "description": description, + "notes": saneyaml.dump(notes), + "source_date_published": source_date_published, + "platform": platform, + "source_url": source_url, + }, + ) diff --git a/vulnerabilities/pipelines/vulnerability_kev.py b/vulnerabilities/pipelines/vulnerability_kev.py new file mode 100644 index 000000000..255249472 --- /dev/null +++ b/vulnerabilities/pipelines/vulnerability_kev.py @@ -0,0 +1,69 @@ +import logging + +from sphinx.util import requests + +from vulnerabilities.models import Alias +from vulnerabilities.models import Exploit +from vulnerabilities.pipelines import VulnerableCodePipeline + +module_logger = logging.getLogger(__name__) + + +class VulnerabilityKevPipeline(VulnerableCodePipeline): + """ + Known Exploited Vulnerabilities Pipeline: Retrieve KEV data, iterate through it to identify vulnerabilities + by their associated aliases, and create or update the corresponding Exploit instances. 
+ """ + + kev_data = {} + + @classmethod + def steps(cls): + return ( + cls.fetch_exploits, + cls.add_exploits, + ) + + def fetch_exploits(self): + kev_url = ( + "https://www.cisa.gov/sites/default/files/feeds/known_exploited_vulnerabilities.json" + ) + response = requests.get(kev_url) + if response.status_code != 200: + self.log( + f"Failed to fetch the CISA Catalog of Known Exploited Vulnerabilities: {kev_url}" + ) + return + self.kev_data = response.json() + + def add_exploits(self): + for kev_vul in self.kev_data.get("vulnerabilities", []): + cve_id = kev_vul.get("cveID") + + if not cve_id: + continue + + alias = Alias.objects.get_or_none(alias=cve_id) + + if not alias: + continue + + vul = alias.vulnerability + + if not vul: + continue + + Exploit.objects.update_or_create( + vulnerability=vul, + data_source="KEV", + defaults={ + "description": kev_vul["shortDescription"], + "date_added": kev_vul["dateAdded"], + "required_action": kev_vul["requiredAction"], + "due_date": kev_vul["dueDate"], + "notes": kev_vul["notes"], + "known_ransomware_campaign_use": True + if kev_vul["knownRansomwareCampaignUse"] == "Known" + else False, + }, + ) From 0dbe64086ac3ce668aefa436773069a791adf1f5 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 17 Sep 2024 17:48:51 +0300 Subject: [PATCH 02/16] Implement the appropriate LoopProgress progress bar. Refactor the error handling logic in the code. Signed-off-by: ziadhany Signed-off-by: ziad hany --- vulnerabilities/pipelines/exploitdb.py | 95 ------------------- vulnerabilities/pipelines/metasploit.py | 78 --------------- .../pipelines/vulnerability_kev.py | 69 -------------- 3 files changed, 242 deletions(-) delete mode 100644 vulnerabilities/pipelines/exploitdb.py delete mode 100644 vulnerabilities/pipelines/metasploit.py delete mode 100644 vulnerabilities/pipelines/vulnerability_kev.py diff --git a/vulnerabilities/pipelines/exploitdb.py b/vulnerabilities/pipelines/exploitdb.py deleted file mode 100644 index 0c3bdc458..000000000 --- a/vulnerabilities/pipelines/exploitdb.py +++ /dev/null @@ -1,95 +0,0 @@ -import csv -import io -import logging - -import requests - -from vulnerabilities.models import Alias -from vulnerabilities.models import Exploit -from vulnerabilities.models import VulnerabilityReference -from vulnerabilities.models import VulnerabilityRelatedReference -from vulnerabilities.pipelines import VulnerableCodePipeline - -logger = logging.getLogger(__name__) - - -class ExploitDBImproverPipeline(VulnerableCodePipeline): - """ - ExploitDB Improver Pipeline: Fetch ExploitDB data, iterate over it to find the vulnerability with - the specified alias, and create or update the ref and ref-type accordingly. 
- """ - - exploit_data = None - - license_expression = "GPL-2.0" - - @classmethod - def steps(cls): - return ( - cls.fetch_exploits, - cls.add_exploit, - ) - - def fetch_exploits(self): - exploit_db_url = ( - "https://gitlab.com/exploit-database/exploitdb/-/raw/main/files_exploits.csv" - ) - response = requests.get(exploit_db_url) - self.exploit_data = io.StringIO(response.text) - - def add_exploit(self): - csvreader = csv.reader(self.exploit_data) - - header = next(csvreader) - for row in csvreader: - - aliases = row[11].split(";") - - for raw_alias in aliases: - - alias = Alias.objects.get_or_none(alias=raw_alias) - if not alias: - continue - - vul = alias.vulnerability - if not vul: - continue - - self.add_exploit_references(row[11], row[16], row[1], vul) - - Exploit.objects.update_or_create( - vulnerability=vul, - data_source="Exploit-DB", - defaults={ - "date_added": row[header.index("date_added")], - "description": row[header.index("description")], - "known_ransomware_campaign_use": row[header.index("verified")], - "source_date_published": row[header.index("date_published")], - "exploit_type": row[header.index("type")], - "platform": row[header.index("platform")], - "source_date_updated": row[header.index("date_updated")], - "source_url": row[header.index("source_url")], - }, - ) - - def add_exploit_references(self, ref_id, direct_url, path, vul): - url_map = { - "file_url": f"https://gitlab.com/exploit-database/exploitdb/-/blob/main/{path}", - "direct_url": direct_url, - } - - for key, url in url_map.items(): - if url: - ref, created = VulnerabilityReference.objects.update_or_create( - url=url, - defaults={ - "reference_id": ref_id, - "reference_type": VulnerabilityReference.EXPLOIT, - }, - ) - - if created: - VulnerabilityRelatedReference.objects.get_or_create( - vulnerability=vul, - reference=ref, - ) diff --git a/vulnerabilities/pipelines/metasploit.py b/vulnerabilities/pipelines/metasploit.py deleted file mode 100644 index be1829ede..000000000 --- a/vulnerabilities/pipelines/metasploit.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging - -import requests -import saneyaml - -from vulnerabilities.models import Alias -from vulnerabilities.models import Exploit -from vulnerabilities.pipelines import VulnerableCodePipeline - -module_logger = logging.getLogger(__name__) - - -class MetasploitImproverPipeline(VulnerableCodePipeline): - """ - Metasploit Exploits Pipeline: Retrieve Metasploit data, iterate through it to identify vulnerabilities - by their associated aliases, and create or update the corresponding Exploit instances. 
- """ - - metasploit_data = {} - - @classmethod - def steps(cls): - return ( - cls.fetch_exploits, - cls.add_exploits, - ) - - def fetch_exploits(self): - url = "https://raw.githubusercontent.com/rapid7/metasploit-framework/master/db/modules_metadata_base.json" - response = requests.get(url) - if response.status_code != 200: - self.log(f"Failed to fetch the Metasploit Exploits: {url}") - return - self.metasploit_data = response.json() - - def add_exploits(self): - for _, record in self.metasploit_data.items(): - vul = None - for ref in record.get("references", []): - if ref.startswith("OSVDB") or ref.startswith("URL-"): - # ignore OSV-DB and reference exploit for metasploit - continue - - if not vul: - try: - alias = Alias.objects.get(alias=ref) - except Alias.DoesNotExist: - continue - - if not alias.vulnerability: - continue - - vul = alias.vulnerability - - if not vul: - continue - - description = record.get("description", "") - notes = record.get("notes", {}) - source_date_published = record.get("disclosure_date") - platform = record.get("platform") - - path = record.get("path") - source_url = ( - f"https://github.com/rapid7/metasploit-framework/tree/master{path}" if path else "" - ) - - Exploit.objects.update_or_create( - vulnerability=vul, - data_source="Metasploit", - defaults={ - "description": description, - "notes": saneyaml.dump(notes), - "source_date_published": source_date_published, - "platform": platform, - "source_url": source_url, - }, - ) diff --git a/vulnerabilities/pipelines/vulnerability_kev.py b/vulnerabilities/pipelines/vulnerability_kev.py deleted file mode 100644 index 255249472..000000000 --- a/vulnerabilities/pipelines/vulnerability_kev.py +++ /dev/null @@ -1,69 +0,0 @@ -import logging - -from sphinx.util import requests - -from vulnerabilities.models import Alias -from vulnerabilities.models import Exploit -from vulnerabilities.pipelines import VulnerableCodePipeline - -module_logger = logging.getLogger(__name__) - - -class VulnerabilityKevPipeline(VulnerableCodePipeline): - """ - Known Exploited Vulnerabilities Pipeline: Retrieve KEV data, iterate through it to identify vulnerabilities - by their associated aliases, and create or update the corresponding Exploit instances. 
- """ - - kev_data = {} - - @classmethod - def steps(cls): - return ( - cls.fetch_exploits, - cls.add_exploits, - ) - - def fetch_exploits(self): - kev_url = ( - "https://www.cisa.gov/sites/default/files/feeds/known_exploited_vulnerabilities.json" - ) - response = requests.get(kev_url) - if response.status_code != 200: - self.log( - f"Failed to fetch the CISA Catalog of Known Exploited Vulnerabilities: {kev_url}" - ) - return - self.kev_data = response.json() - - def add_exploits(self): - for kev_vul in self.kev_data.get("vulnerabilities", []): - cve_id = kev_vul.get("cveID") - - if not cve_id: - continue - - alias = Alias.objects.get_or_none(alias=cve_id) - - if not alias: - continue - - vul = alias.vulnerability - - if not vul: - continue - - Exploit.objects.update_or_create( - vulnerability=vul, - data_source="KEV", - defaults={ - "description": kev_vul["shortDescription"], - "date_added": kev_vul["dateAdded"], - "required_action": kev_vul["requiredAction"], - "due_date": kev_vul["dueDate"], - "notes": kev_vul["notes"], - "known_ransomware_campaign_use": True - if kev_vul["knownRansomwareCampaignUse"] == "Known" - else False, - }, - ) From f5131b71eb9f3d65d6ca93ba0f5f8a158676c805 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Sat, 21 Sep 2024 19:00:21 +0300 Subject: [PATCH 03/16] Fix migration conflict Add pipeline_id for ( kev, metasploit, exploit-db ) Signed-off-by: ziadhany Signed-off-by: ziad hany --- ...{0063_exploit_delete_kev.py => 0065_exploit_delete_kev.py} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename vulnerabilities/migrations/{0063_exploit_delete_kev.py => 0065_exploit_delete_kev.py} (97%) diff --git a/vulnerabilities/migrations/0063_exploit_delete_kev.py b/vulnerabilities/migrations/0065_exploit_delete_kev.py similarity index 97% rename from vulnerabilities/migrations/0063_exploit_delete_kev.py rename to vulnerabilities/migrations/0065_exploit_delete_kev.py index 00d2d60fe..28b14d2c9 100644 --- a/vulnerabilities/migrations/0063_exploit_delete_kev.py +++ b/vulnerabilities/migrations/0065_exploit_delete_kev.py @@ -1,4 +1,4 @@ -# Generated by Django 4.1.13 on 2024-09-10 18:40 +# Generated by Django 4.2.15 on 2024-09-21 15:37 from django.db import migrations, models import django.db.models.deletion @@ -7,7 +7,7 @@ class Migration(migrations.Migration): dependencies = [ - ("vulnerabilities", "0062_package_is_ghost"), + ("vulnerabilities", "0064_update_npm_pypa_advisory_created_by"), ] operations = [ From 8394d1db42373f8d4aaa5b07197a6b12be5465b4 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 20 Aug 2024 15:19:18 +0300 Subject: [PATCH 04/16] Migrate ( metasploit, exploit-db, kev ) to aboutcode pipeline. Set data_source as the header for the exploit table. Squash the migration files into a single file. 
Add test for exploit-db , metasploit Add a missing migration file Rename resources_and_notes to notes Fix Api test Refactor metasploit , exploitdb , kev improver Rename Kev tab to exploit tab Add support for exploitdb , metasploit, kev Signed-off-by: ziadhany Signed-off-by: ziad hany --- .../migrations/0063_exploit_delete_kev.py | 131 ++++++++++++++++++ vulnerabilities/pipelines/exploitdb.py | 95 +++++++++++++ vulnerabilities/pipelines/metasploit.py | 78 +++++++++++ .../pipelines/vulnerability_kev.py | 69 +++++++++ 4 files changed, 373 insertions(+) create mode 100644 vulnerabilities/migrations/0063_exploit_delete_kev.py create mode 100644 vulnerabilities/pipelines/exploitdb.py create mode 100644 vulnerabilities/pipelines/metasploit.py create mode 100644 vulnerabilities/pipelines/vulnerability_kev.py diff --git a/vulnerabilities/migrations/0063_exploit_delete_kev.py b/vulnerabilities/migrations/0063_exploit_delete_kev.py new file mode 100644 index 000000000..00d2d60fe --- /dev/null +++ b/vulnerabilities/migrations/0063_exploit_delete_kev.py @@ -0,0 +1,131 @@ +# Generated by Django 4.1.13 on 2024-09-10 18:40 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0062_package_is_ghost"), + ] + + operations = [ + migrations.CreateModel( + name="Exploit", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "date_added", + models.DateField( + blank=True, + help_text="The date the vulnerability was added to an exploit catalog.", + null=True, + ), + ), + ( + "description", + models.TextField( + blank=True, + help_text="Description of the vulnerability in an exploit catalog, often a refinement of the original CVE description", + null=True, + ), + ), + ( + "required_action", + models.TextField( + blank=True, + help_text="The required action to address the vulnerability, typically to apply vendor updates or apply vendor mitigations or to discontinue use.", + null=True, + ), + ), + ( + "due_date", + models.DateField( + blank=True, + help_text="The date the required action is due, which applies to all USA federal civilian executive branch (FCEB) agencies, but all organizations are strongly encouraged to execute the required action", + null=True, + ), + ), + ( + "notes", + models.TextField( + blank=True, + help_text="Additional notes and resources about the vulnerability, often a URL to vendor instructions.", + null=True, + ), + ), + ( + "known_ransomware_campaign_use", + models.BooleanField( + default=False, + help_text="Known' if this vulnerability is known to have been leveraged as part of a ransomware campaign; \n or 'Unknown' if there is no confirmation that the vulnerability has been utilized for ransomware.", + ), + ), + ( + "source_date_published", + models.DateField( + blank=True, + help_text="The date that the exploit was published or disclosed.", + null=True, + ), + ), + ( + "exploit_type", + models.TextField( + blank=True, + help_text="The type of the exploit as provided by the original upstream data source.", + null=True, + ), + ), + ( + "platform", + models.TextField( + blank=True, + help_text="The platform associated with the exploit as provided by the original upstream data source.", + null=True, + ), + ), + ( + "source_date_updated", + models.DateField( + blank=True, + help_text="The date the exploit was updated in the original upstream data source.", + null=True, + ), + ), + ( + "data_source", + 
models.TextField( + blank=True, + help_text="The source of the exploit information, such as CISA KEV, exploitdb, metaspoit, or others.", + null=True, + ), + ), + ( + "source_url", + models.URLField( + blank=True, + help_text="The URL to the exploit as provided in the original upstream data source.", + null=True, + ), + ), + ( + "vulnerability", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="exploits", + to="vulnerabilities.vulnerability", + ), + ), + ], + ), + migrations.DeleteModel( + name="Kev", + ), + ] diff --git a/vulnerabilities/pipelines/exploitdb.py b/vulnerabilities/pipelines/exploitdb.py new file mode 100644 index 000000000..0c3bdc458 --- /dev/null +++ b/vulnerabilities/pipelines/exploitdb.py @@ -0,0 +1,95 @@ +import csv +import io +import logging + +import requests + +from vulnerabilities.models import Alias +from vulnerabilities.models import Exploit +from vulnerabilities.models import VulnerabilityReference +from vulnerabilities.models import VulnerabilityRelatedReference +from vulnerabilities.pipelines import VulnerableCodePipeline + +logger = logging.getLogger(__name__) + + +class ExploitDBImproverPipeline(VulnerableCodePipeline): + """ + ExploitDB Improver Pipeline: Fetch ExploitDB data, iterate over it to find the vulnerability with + the specified alias, and create or update the ref and ref-type accordingly. + """ + + exploit_data = None + + license_expression = "GPL-2.0" + + @classmethod + def steps(cls): + return ( + cls.fetch_exploits, + cls.add_exploit, + ) + + def fetch_exploits(self): + exploit_db_url = ( + "https://gitlab.com/exploit-database/exploitdb/-/raw/main/files_exploits.csv" + ) + response = requests.get(exploit_db_url) + self.exploit_data = io.StringIO(response.text) + + def add_exploit(self): + csvreader = csv.reader(self.exploit_data) + + header = next(csvreader) + for row in csvreader: + + aliases = row[11].split(";") + + for raw_alias in aliases: + + alias = Alias.objects.get_or_none(alias=raw_alias) + if not alias: + continue + + vul = alias.vulnerability + if not vul: + continue + + self.add_exploit_references(row[11], row[16], row[1], vul) + + Exploit.objects.update_or_create( + vulnerability=vul, + data_source="Exploit-DB", + defaults={ + "date_added": row[header.index("date_added")], + "description": row[header.index("description")], + "known_ransomware_campaign_use": row[header.index("verified")], + "source_date_published": row[header.index("date_published")], + "exploit_type": row[header.index("type")], + "platform": row[header.index("platform")], + "source_date_updated": row[header.index("date_updated")], + "source_url": row[header.index("source_url")], + }, + ) + + def add_exploit_references(self, ref_id, direct_url, path, vul): + url_map = { + "file_url": f"https://gitlab.com/exploit-database/exploitdb/-/blob/main/{path}", + "direct_url": direct_url, + } + + for key, url in url_map.items(): + if url: + ref, created = VulnerabilityReference.objects.update_or_create( + url=url, + defaults={ + "reference_id": ref_id, + "reference_type": VulnerabilityReference.EXPLOIT, + }, + ) + + if created: + VulnerabilityRelatedReference.objects.get_or_create( + vulnerability=vul, + reference=ref, + ) diff --git a/vulnerabilities/pipelines/metasploit.py b/vulnerabilities/pipelines/metasploit.py new file mode 100644 index 000000000..be1829ede --- /dev/null +++ b/vulnerabilities/pipelines/metasploit.py @@ -0,0 +1,78 @@ +import logging + +import requests +import saneyaml + +from vulnerabilities.models import Alias +from 
vulnerabilities.models import Exploit +from vulnerabilities.pipelines import VulnerableCodePipeline + +module_logger = logging.getLogger(__name__) + + +class MetasploitImproverPipeline(VulnerableCodePipeline): + """ + Metasploit Exploits Pipeline: Retrieve Metasploit data, iterate through it to identify vulnerabilities + by their associated aliases, and create or update the corresponding Exploit instances. + """ + + metasploit_data = {} + + @classmethod + def steps(cls): + return ( + cls.fetch_exploits, + cls.add_exploits, + ) + + def fetch_exploits(self): + url = "https://raw.githubusercontent.com/rapid7/metasploit-framework/master/db/modules_metadata_base.json" + response = requests.get(url) + if response.status_code != 200: + self.log(f"Failed to fetch the Metasploit Exploits: {url}") + return + self.metasploit_data = response.json() + + def add_exploits(self): + for _, record in self.metasploit_data.items(): + vul = None + for ref in record.get("references", []): + if ref.startswith("OSVDB") or ref.startswith("URL-"): + # ignore OSV-DB and reference exploit for metasploit + continue + + if not vul: + try: + alias = Alias.objects.get(alias=ref) + except Alias.DoesNotExist: + continue + + if not alias.vulnerability: + continue + + vul = alias.vulnerability + + if not vul: + continue + + description = record.get("description", "") + notes = record.get("notes", {}) + source_date_published = record.get("disclosure_date") + platform = record.get("platform") + + path = record.get("path") + source_url = ( + f"https://github.com/rapid7/metasploit-framework/tree/master{path}" if path else "" + ) + + Exploit.objects.update_or_create( + vulnerability=vul, + data_source="Metasploit", + defaults={ + "description": description, + "notes": saneyaml.dump(notes), + "source_date_published": source_date_published, + "platform": platform, + "source_url": source_url, + }, + ) diff --git a/vulnerabilities/pipelines/vulnerability_kev.py b/vulnerabilities/pipelines/vulnerability_kev.py new file mode 100644 index 000000000..255249472 --- /dev/null +++ b/vulnerabilities/pipelines/vulnerability_kev.py @@ -0,0 +1,69 @@ +import logging + +from sphinx.util import requests + +from vulnerabilities.models import Alias +from vulnerabilities.models import Exploit +from vulnerabilities.pipelines import VulnerableCodePipeline + +module_logger = logging.getLogger(__name__) + + +class VulnerabilityKevPipeline(VulnerableCodePipeline): + """ + Known Exploited Vulnerabilities Pipeline: Retrieve KEV data, iterate through it to identify vulnerabilities + by their associated aliases, and create or update the corresponding Exploit instances. 
+ """ + + kev_data = {} + + @classmethod + def steps(cls): + return ( + cls.fetch_exploits, + cls.add_exploits, + ) + + def fetch_exploits(self): + kev_url = ( + "https://www.cisa.gov/sites/default/files/feeds/known_exploited_vulnerabilities.json" + ) + response = requests.get(kev_url) + if response.status_code != 200: + self.log( + f"Failed to fetch the CISA Catalog of Known Exploited Vulnerabilities: {kev_url}" + ) + return + self.kev_data = response.json() + + def add_exploits(self): + for kev_vul in self.kev_data.get("vulnerabilities", []): + cve_id = kev_vul.get("cveID") + + if not cve_id: + continue + + alias = Alias.objects.get_or_none(alias=cve_id) + + if not alias: + continue + + vul = alias.vulnerability + + if not vul: + continue + + Exploit.objects.update_or_create( + vulnerability=vul, + data_source="KEV", + defaults={ + "description": kev_vul["shortDescription"], + "date_added": kev_vul["dateAdded"], + "required_action": kev_vul["requiredAction"], + "due_date": kev_vul["dueDate"], + "notes": kev_vul["notes"], + "known_ransomware_campaign_use": True + if kev_vul["knownRansomwareCampaignUse"] == "Known" + else False, + }, + ) From 4b808d9f9efe9eac5b5a2bfb87ee310cffb5fd15 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 17 Sep 2024 17:48:51 +0300 Subject: [PATCH 05/16] Implement the appropriate LoopProgress progress bar. Refactor the error handling logic in the code. Signed-off-by: ziadhany Signed-off-by: ziad hany --- vulnerabilities/pipelines/exploitdb.py | 95 ------------------- vulnerabilities/pipelines/metasploit.py | 78 --------------- .../pipelines/vulnerability_kev.py | 69 -------------- 3 files changed, 242 deletions(-) delete mode 100644 vulnerabilities/pipelines/exploitdb.py delete mode 100644 vulnerabilities/pipelines/metasploit.py delete mode 100644 vulnerabilities/pipelines/vulnerability_kev.py diff --git a/vulnerabilities/pipelines/exploitdb.py b/vulnerabilities/pipelines/exploitdb.py deleted file mode 100644 index 0c3bdc458..000000000 --- a/vulnerabilities/pipelines/exploitdb.py +++ /dev/null @@ -1,95 +0,0 @@ -import csv -import io -import logging - -import requests - -from vulnerabilities.models import Alias -from vulnerabilities.models import Exploit -from vulnerabilities.models import VulnerabilityReference -from vulnerabilities.models import VulnerabilityRelatedReference -from vulnerabilities.pipelines import VulnerableCodePipeline - -logger = logging.getLogger(__name__) - - -class ExploitDBImproverPipeline(VulnerableCodePipeline): - """ - ExploitDB Improver Pipeline: Fetch ExploitDB data, iterate over it to find the vulnerability with - the specified alias, and create or update the ref and ref-type accordingly. 
- """ - - exploit_data = None - - license_expression = "GPL-2.0" - - @classmethod - def steps(cls): - return ( - cls.fetch_exploits, - cls.add_exploit, - ) - - def fetch_exploits(self): - exploit_db_url = ( - "https://gitlab.com/exploit-database/exploitdb/-/raw/main/files_exploits.csv" - ) - response = requests.get(exploit_db_url) - self.exploit_data = io.StringIO(response.text) - - def add_exploit(self): - csvreader = csv.reader(self.exploit_data) - - header = next(csvreader) - for row in csvreader: - - aliases = row[11].split(";") - - for raw_alias in aliases: - - alias = Alias.objects.get_or_none(alias=raw_alias) - if not alias: - continue - - vul = alias.vulnerability - if not vul: - continue - - self.add_exploit_references(row[11], row[16], row[1], vul) - - Exploit.objects.update_or_create( - vulnerability=vul, - data_source="Exploit-DB", - defaults={ - "date_added": row[header.index("date_added")], - "description": row[header.index("description")], - "known_ransomware_campaign_use": row[header.index("verified")], - "source_date_published": row[header.index("date_published")], - "exploit_type": row[header.index("type")], - "platform": row[header.index("platform")], - "source_date_updated": row[header.index("date_updated")], - "source_url": row[header.index("source_url")], - }, - ) - - def add_exploit_references(self, ref_id, direct_url, path, vul): - url_map = { - "file_url": f"https://gitlab.com/exploit-database/exploitdb/-/blob/main/{path}", - "direct_url": direct_url, - } - - for key, url in url_map.items(): - if url: - ref, created = VulnerabilityReference.objects.update_or_create( - url=url, - defaults={ - "reference_id": ref_id, - "reference_type": VulnerabilityReference.EXPLOIT, - }, - ) - - if created: - VulnerabilityRelatedReference.objects.get_or_create( - vulnerability=vul, - reference=ref, - ) diff --git a/vulnerabilities/pipelines/metasploit.py b/vulnerabilities/pipelines/metasploit.py deleted file mode 100644 index be1829ede..000000000 --- a/vulnerabilities/pipelines/metasploit.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging - -import requests -import saneyaml - -from vulnerabilities.models import Alias -from vulnerabilities.models import Exploit -from vulnerabilities.pipelines import VulnerableCodePipeline - -module_logger = logging.getLogger(__name__) - - -class MetasploitImproverPipeline(VulnerableCodePipeline): - """ - Metasploit Exploits Pipeline: Retrieve Metasploit data, iterate through it to identify vulnerabilities - by their associated aliases, and create or update the corresponding Exploit instances. 
- """ - - metasploit_data = {} - - @classmethod - def steps(cls): - return ( - cls.fetch_exploits, - cls.add_exploits, - ) - - def fetch_exploits(self): - url = "https://raw.githubusercontent.com/rapid7/metasploit-framework/master/db/modules_metadata_base.json" - response = requests.get(url) - if response.status_code != 200: - self.log(f"Failed to fetch the Metasploit Exploits: {url}") - return - self.metasploit_data = response.json() - - def add_exploits(self): - for _, record in self.metasploit_data.items(): - vul = None - for ref in record.get("references", []): - if ref.startswith("OSVDB") or ref.startswith("URL-"): - # ignore OSV-DB and reference exploit for metasploit - continue - - if not vul: - try: - alias = Alias.objects.get(alias=ref) - except Alias.DoesNotExist: - continue - - if not alias.vulnerability: - continue - - vul = alias.vulnerability - - if not vul: - continue - - description = record.get("description", "") - notes = record.get("notes", {}) - source_date_published = record.get("disclosure_date") - platform = record.get("platform") - - path = record.get("path") - source_url = ( - f"https://github.com/rapid7/metasploit-framework/tree/master{path}" if path else "" - ) - - Exploit.objects.update_or_create( - vulnerability=vul, - data_source="Metasploit", - defaults={ - "description": description, - "notes": saneyaml.dump(notes), - "source_date_published": source_date_published, - "platform": platform, - "source_url": source_url, - }, - ) diff --git a/vulnerabilities/pipelines/vulnerability_kev.py b/vulnerabilities/pipelines/vulnerability_kev.py deleted file mode 100644 index 255249472..000000000 --- a/vulnerabilities/pipelines/vulnerability_kev.py +++ /dev/null @@ -1,69 +0,0 @@ -import logging - -from sphinx.util import requests - -from vulnerabilities.models import Alias -from vulnerabilities.models import Exploit -from vulnerabilities.pipelines import VulnerableCodePipeline - -module_logger = logging.getLogger(__name__) - - -class VulnerabilityKevPipeline(VulnerableCodePipeline): - """ - Known Exploited Vulnerabilities Pipeline: Retrieve KEV data, iterate through it to identify vulnerabilities - by their associated aliases, and create or update the corresponding Exploit instances. 
- """ - - kev_data = {} - - @classmethod - def steps(cls): - return ( - cls.fetch_exploits, - cls.add_exploits, - ) - - def fetch_exploits(self): - kev_url = ( - "https://www.cisa.gov/sites/default/files/feeds/known_exploited_vulnerabilities.json" - ) - response = requests.get(kev_url) - if response.status_code != 200: - self.log( - f"Failed to fetch the CISA Catalog of Known Exploited Vulnerabilities: {kev_url}" - ) - return - self.kev_data = response.json() - - def add_exploits(self): - for kev_vul in self.kev_data.get("vulnerabilities", []): - cve_id = kev_vul.get("cveID") - - if not cve_id: - continue - - alias = Alias.objects.get_or_none(alias=cve_id) - - if not alias: - continue - - vul = alias.vulnerability - - if not vul: - continue - - Exploit.objects.update_or_create( - vulnerability=vul, - data_source="KEV", - defaults={ - "description": kev_vul["shortDescription"], - "date_added": kev_vul["dateAdded"], - "required_action": kev_vul["requiredAction"], - "due_date": kev_vul["dueDate"], - "notes": kev_vul["notes"], - "known_ransomware_campaign_use": True - if kev_vul["knownRansomwareCampaignUse"] == "Known" - else False, - }, - ) From 6e67497fe25c4bbfbb89f964a2cb4384408621da Mon Sep 17 00:00:00 2001 From: ziadhany Date: Sat, 21 Sep 2024 19:07:12 +0300 Subject: [PATCH 06/16] Remove unwanted migration file Signed-off-by: ziadhany Signed-off-by: ziad hany --- .../migrations/0063_exploit_delete_kev.py | 131 ------------------ 1 file changed, 131 deletions(-) delete mode 100644 vulnerabilities/migrations/0063_exploit_delete_kev.py diff --git a/vulnerabilities/migrations/0063_exploit_delete_kev.py b/vulnerabilities/migrations/0063_exploit_delete_kev.py deleted file mode 100644 index 00d2d60fe..000000000 --- a/vulnerabilities/migrations/0063_exploit_delete_kev.py +++ /dev/null @@ -1,131 +0,0 @@ -# Generated by Django 4.1.13 on 2024-09-10 18:40 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ("vulnerabilities", "0062_package_is_ghost"), - ] - - operations = [ - migrations.CreateModel( - name="Exploit", - fields=[ - ( - "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), - ), - ( - "date_added", - models.DateField( - blank=True, - help_text="The date the vulnerability was added to an exploit catalog.", - null=True, - ), - ), - ( - "description", - models.TextField( - blank=True, - help_text="Description of the vulnerability in an exploit catalog, often a refinement of the original CVE description", - null=True, - ), - ), - ( - "required_action", - models.TextField( - blank=True, - help_text="The required action to address the vulnerability, typically to apply vendor updates or apply vendor mitigations or to discontinue use.", - null=True, - ), - ), - ( - "due_date", - models.DateField( - blank=True, - help_text="The date the required action is due, which applies to all USA federal civilian executive branch (FCEB) agencies, but all organizations are strongly encouraged to execute the required action", - null=True, - ), - ), - ( - "notes", - models.TextField( - blank=True, - help_text="Additional notes and resources about the vulnerability, often a URL to vendor instructions.", - null=True, - ), - ), - ( - "known_ransomware_campaign_use", - models.BooleanField( - default=False, - help_text="Known' if this vulnerability is known to have been leveraged as part of a ransomware campaign; \n or 'Unknown' if there is no confirmation that the 
vulnerability has been utilized for ransomware.", - ), - ), - ( - "source_date_published", - models.DateField( - blank=True, - help_text="The date that the exploit was published or disclosed.", - null=True, - ), - ), - ( - "exploit_type", - models.TextField( - blank=True, - help_text="The type of the exploit as provided by the original upstream data source.", - null=True, - ), - ), - ( - "platform", - models.TextField( - blank=True, - help_text="The platform associated with the exploit as provided by the original upstream data source.", - null=True, - ), - ), - ( - "source_date_updated", - models.DateField( - blank=True, - help_text="The date the exploit was updated in the original upstream data source.", - null=True, - ), - ), - ( - "data_source", - models.TextField( - blank=True, - help_text="The source of the exploit information, such as CISA KEV, exploitdb, metaspoit, or others.", - null=True, - ), - ), - ( - "source_url", - models.URLField( - blank=True, - help_text="The URL to the exploit as provided in the original upstream data source.", - null=True, - ), - ), - ( - "vulnerability", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="exploits", - to="vulnerabilities.vulnerability", - ), - ), - ], - ), - migrations.DeleteModel( - name="Kev", - ), - ] From 2d9e5858396066feebf38f194cc1625f1f931a2e Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 17 Sep 2024 05:13:41 +0300 Subject: [PATCH 07/16] Add support for Calculating Risk in VulnerableCode Signed-off-by: ziadhany Signed-off-by: ziad hany --- vulnerabilities/risk.py | 125 +++++++++++++ .../templates/package_details.html | 10 ++ vulnerabilities/tests/test_risk.py | 164 ++++++++++++++++++ vulnerabilities/views.py | 2 + weight_config.json | 5 + 5 files changed, 306 insertions(+) create mode 100644 vulnerabilities/risk.py create mode 100644 vulnerabilities/tests/test_risk.py create mode 100644 weight_config.json diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py new file mode 100644 index 000000000..5be6e4ebf --- /dev/null +++ b/vulnerabilities/risk.py @@ -0,0 +1,125 @@ +import os +import re + +from vulnerabilities.models import Exploit +from vulnerabilities.models import Package +from vulnerabilities.models import PackageRelatedVulnerability +from vulnerabilities.models import Vulnerability +from vulnerabilities.models import VulnerabilityReference +from vulnerabilities.severity_systems import EPSS +from vulnerabilities.utils import load_json + +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def get_weighted_severity(severities): + """ + Weighted Severity is the maximum value obtained when each Severity is multiplied + by its associated Weight/10. 
+ Example of Weighted Severity: max(7*(10/10), 8*(3/10), 6*(8/10)) = 7 + """ + weight_config_path = os.path.join(BASE_DIR, "..", "weight_config.json") + weight_config = load_json(weight_config_path) + + score_map = { + "low": 3, + "moderate": 6.9, + "medium": 6.9, + "high": 8.9, + "important": 8.9, + "critical": 10.0, + "urgent": 10.0, + } + + score_list = [] + for severity in severities: + weights = [ + value + for regex_key, value in weight_config.items() + if re.match(regex_key, severity.reference.url) + ] + + if not weights: + return 0 + + max_weight = float(max(weights)) / 10 + vul_score = severity.value + try: + vul_score = float(vul_score) + vul_score_value = vul_score * max_weight + except ValueError: + vul_score = vul_score.lower() + vul_score_value = score_map.get(vul_score, 0) * max_weight + + score_list.append(vul_score_value) + return max(score_list) if score_list else 0 + + +def get_exploitability_level(exploits, references, severities): + """ + Exploitability refers to the potential or + probability of a software package vulnerability being exploited by + malicious actors to compromise systems, applications, or networks. + It is determined automatically by discovery of exploits. + """ + # no exploit known ( default .5) + exploit_level = 0.5 + + if exploits: + # Automatable Exploit with PoC script published OR known exploits (KEV) in the wild OR known ransomware + exploit_level = 2 + + elif severities: + # high EPSS. + epss = severities.filter( + scoring_system=EPSS.identifier, + ) + epss = any(float(epss.value) > 0.8 for epss in epss) + if epss: + exploit_level = 2 + + elif references: + # PoC/Exploit script published + ref_exploits = references.filter( + reference_type=VulnerabilityReference.EXPLOIT, + ) + if ref_exploits: + exploit_level = 1 + + return exploit_level + + +def calculate_vulnerability_risk(vulnerability: Vulnerability): + """ + Risk may be expressed as a number ranging from 0 to 10. + Risk is calculated from weighted severity and exploitability values. + It is the maximum value of (the weighted severity multiplied by its exploitability) or 10 + + Risk = min(weighted severity * exploitability, 10) + """ + # TODO try to use a prefetch + references = vulnerability.references.all() + severities = vulnerability.severities.all() + exploits = Exploit.objects.filter(vulnerability=vulnerability) + + weighted_severity = get_weighted_severity(severities) + exploitability = get_exploitability_level(exploits, references, severities) + return min(weighted_severity * exploitability, 10) + + +def calculate_pkg_risk(package: Package): + """ + Calculate the risk for a package by iterating over all vulnerabilities that affects this package + and determining the associated risk. 
+ """ + + result = [] + for pkg_related_vul in PackageRelatedVulnerability.objects.filter(package=package, fix=False): + if pkg_related_vul: + risk = calculate_vulnerability_risk(pkg_related_vul.vulnerability) + result.append(risk) + + if not result: + return + + return f"{max(result):.2f}" diff --git a/vulnerabilities/templates/package_details.html b/vulnerabilities/templates/package_details.html index 26de42fa8..85cb10cdd 100644 --- a/vulnerabilities/templates/package_details.html +++ b/vulnerabilities/templates/package_details.html @@ -112,6 +112,16 @@ {% endif %} + + + Risk + + + {% if risk %} + {{ risk }} + {% endif %} + + diff --git a/vulnerabilities/tests/test_risk.py b/vulnerabilities/tests/test_risk.py new file mode 100644 index 000000000..30e6e9807 --- /dev/null +++ b/vulnerabilities/tests/test_risk.py @@ -0,0 +1,164 @@ +import pytest + +from vulnerabilities.models import Exploit +from vulnerabilities.models import Vulnerability +from vulnerabilities.models import VulnerabilityReference +from vulnerabilities.models import VulnerabilityRelatedReference +from vulnerabilities.models import VulnerabilitySeverity +from vulnerabilities.models import Weakness +from vulnerabilities.risk import calculate_vulnerability_risk +from vulnerabilities.risk import get_exploitability_level +from vulnerabilities.risk import get_weighted_severity +from vulnerabilities.severity_systems import CVSSV3 +from vulnerabilities.severity_systems import EPSS +from vulnerabilities.severity_systems import GENERIC + + +@pytest.fixture +@pytest.mark.django_db +def vulnerability(): + vul = Vulnerability(vulnerability_id="VCID-Existing") + vul.save() + + reference1 = VulnerabilityReference.objects.create( + reference_id="", + url="https://nvd.nist.gov/vuln/detail/CVE-xxxx-xxx1", + ) + + VulnerabilitySeverity.objects.create( + reference=reference1, + scoring_system=CVSSV3.identifier, + scoring_elements="CVSS:3.0/AV:P/AC:H/PR:H/UI:R/S:C/C:H/I:H/A:N/E:H/RL:O/RC:R/CR:H/MAC:H/MC:L", + value="6.5", + ) + + VulnerabilitySeverity.objects.create( + reference=reference1, + scoring_system=GENERIC.identifier, + value="MODERATE", # 6.9 + ) + + VulnerabilityRelatedReference.objects.create(reference=reference1, vulnerability=vul) + + weaknesses = Weakness.objects.create(cwe_id=119) + vul.weaknesses.add(weaknesses) + return vul + + +@pytest.fixture +@pytest.mark.django_db +def exploit(): + vul = Vulnerability(vulnerability_id="VCID-Exploit") + vul.save() + return Exploit.objects.create(vulnerability=vul, description="exploit description") + + +@pytest.fixture +@pytest.mark.django_db +def vulnerability_with_exploit_ref(): + vul = Vulnerability(vulnerability_id="VCID-Exploit-Ref") + vul.save() + + reference_exploit = VulnerabilityReference.objects.create( + reference_id="", + reference_type=VulnerabilityReference.EXPLOIT, + url="https://nvd.nist.gov/vuln/detail/CVE-xxxx-xxxx2", + ) + + VulnerabilityRelatedReference.objects.create(reference=reference_exploit, vulnerability=vul) + return vul + + +@pytest.fixture +@pytest.mark.django_db +def high_epss_score(): + vul = Vulnerability(vulnerability_id="VCID-HIGH-EPSS") + vul.save() + + reference1 = VulnerabilityReference.objects.create( + reference_id="", + url="https://nvd.nist.gov/vuln/detail/CVE-xxxx-xxx3", + ) + + VulnerabilitySeverity.objects.create( + reference=reference1, + scoring_system=EPSS.identifier, + value=".9", + ) + + VulnerabilityRelatedReference.objects.create(reference=reference1, vulnerability=vul) + return vul.severities + + +@pytest.fixture +@pytest.mark.django_db 
+def low_epss_score(): + vul = Vulnerability(vulnerability_id="VCID-LOW-EPSS") + vul.save() + + reference1 = VulnerabilityReference.objects.create( + reference_id="", + url="https://nvd.nist.gov/vuln/detail/CVE-xxxx-xxx4", + ) + + VulnerabilitySeverity.objects.create( + reference=reference1, + scoring_system=EPSS.identifier, + value=".3", + ) + + VulnerabilityRelatedReference.objects.create(reference=reference1, vulnerability=vul) + return vul.severities + + +@pytest.mark.django_db +def test_exploitability_level( + exploit, + vulnerability_with_exploit_ref, + high_epss_score, + low_epss_score, + vulnerability, +): + + assert get_exploitability_level(exploit, None, None) == 2 + + assert get_exploitability_level(None, None, high_epss_score) == 2 + + assert get_exploitability_level(None, None, low_epss_score) == 0.5 + + assert ( + get_exploitability_level( + None, + vulnerability_with_exploit_ref.references, + vulnerability_with_exploit_ref.severities, + ) + == 1 + ) + + assert get_exploitability_level(None, None, None) == 0.5 + + +@pytest.mark.django_db +def test_get_weighted_severity(vulnerability): + severities = vulnerability.severities.all() + assert get_weighted_severity(severities) == 6.210000000000001 + + reference2 = VulnerabilityReference.objects.create( + reference_id="", + url="https://security-tracker.debian.org/tracker/CVE-2019-13057", + ) + + VulnerabilitySeverity.objects.create( + reference=reference2, + scoring_system=GENERIC.identifier, + value="CRITICAL", + ) + + VulnerabilityRelatedReference.objects.create(reference=reference2, vulnerability=vulnerability) + new_severities = vulnerability.severities.all() + assert get_weighted_severity(new_severities) == 9 + + +@pytest.mark.django_db +def test_calculate_vulnerability_risk(vulnerability): + assert calculate_vulnerability_risk(vulnerability) == 3.1050000000000004 diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 394dc1c36..75b510cdc 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -31,6 +31,7 @@ from vulnerabilities.forms import PackageSearchForm from vulnerabilities.forms import VulnerabilitySearchForm from vulnerabilities.models import VulnerabilityStatusType +from vulnerabilities.risk import calculate_pkg_risk from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS from vulnerabilities.utils import get_severity_range @@ -122,6 +123,7 @@ def get_context_data(self, **kwargs): context["fixing_vulnerabilities"] = package.fixing.order_by("vulnerability_id") context["package_search_form"] = PackageSearchForm(self.request.GET) context["fixed_package_details"] = package.fixed_package_details + context["risk"] = calculate_pkg_risk(package) context["history"] = list(package.history) return context diff --git a/weight_config.json b/weight_config.json new file mode 100644 index 000000000..7dd69c4ae --- /dev/null +++ b/weight_config.json @@ -0,0 +1,5 @@ +{ + "https://nvd\\.nist\\.gov/.*": 9, + "https:\\/\\/security-tracker\\.debian\\.org\\/.*": 9, + "^(?:http|ftp)s?://": 1 +} \ No newline at end of file From 22cdb76405a155a605064bd0ff2e20d714f96e97 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 1 Oct 2024 03:16:09 +0300 Subject: [PATCH 08/16] Remove unwanted migration file Signed-off-by: ziadhany Signed-off-by: ziad hany --- .../migrations/0065_exploit_delete_kev.py | 131 ------------------ 1 file changed, 131 deletions(-) delete mode 100644 vulnerabilities/migrations/0065_exploit_delete_kev.py diff --git 
a/vulnerabilities/migrations/0065_exploit_delete_kev.py b/vulnerabilities/migrations/0065_exploit_delete_kev.py deleted file mode 100644 index 28b14d2c9..000000000 --- a/vulnerabilities/migrations/0065_exploit_delete_kev.py +++ /dev/null @@ -1,131 +0,0 @@ -# Generated by Django 4.2.15 on 2024-09-21 15:37 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ("vulnerabilities", "0064_update_npm_pypa_advisory_created_by"), - ] - - operations = [ - migrations.CreateModel( - name="Exploit", - fields=[ - ( - "id", - models.AutoField( - auto_created=True, primary_key=True, serialize=False, verbose_name="ID" - ), - ), - ( - "date_added", - models.DateField( - blank=True, - help_text="The date the vulnerability was added to an exploit catalog.", - null=True, - ), - ), - ( - "description", - models.TextField( - blank=True, - help_text="Description of the vulnerability in an exploit catalog, often a refinement of the original CVE description", - null=True, - ), - ), - ( - "required_action", - models.TextField( - blank=True, - help_text="The required action to address the vulnerability, typically to apply vendor updates or apply vendor mitigations or to discontinue use.", - null=True, - ), - ), - ( - "due_date", - models.DateField( - blank=True, - help_text="The date the required action is due, which applies to all USA federal civilian executive branch (FCEB) agencies, but all organizations are strongly encouraged to execute the required action", - null=True, - ), - ), - ( - "notes", - models.TextField( - blank=True, - help_text="Additional notes and resources about the vulnerability, often a URL to vendor instructions.", - null=True, - ), - ), - ( - "known_ransomware_campaign_use", - models.BooleanField( - default=False, - help_text="Known' if this vulnerability is known to have been leveraged as part of a ransomware campaign; \n or 'Unknown' if there is no confirmation that the vulnerability has been utilized for ransomware.", - ), - ), - ( - "source_date_published", - models.DateField( - blank=True, - help_text="The date that the exploit was published or disclosed.", - null=True, - ), - ), - ( - "exploit_type", - models.TextField( - blank=True, - help_text="The type of the exploit as provided by the original upstream data source.", - null=True, - ), - ), - ( - "platform", - models.TextField( - blank=True, - help_text="The platform associated with the exploit as provided by the original upstream data source.", - null=True, - ), - ), - ( - "source_date_updated", - models.DateField( - blank=True, - help_text="The date the exploit was updated in the original upstream data source.", - null=True, - ), - ), - ( - "data_source", - models.TextField( - blank=True, - help_text="The source of the exploit information, such as CISA KEV, exploitdb, metaspoit, or others.", - null=True, - ), - ), - ( - "source_url", - models.URLField( - blank=True, - help_text="The URL to the exploit as provided in the original upstream data source.", - null=True, - ), - ), - ( - "vulnerability", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="exploits", - to="vulnerabilities.vulnerability", - ), - ), - ], - ), - migrations.DeleteModel( - name="Kev", - ), - ] From 8beb3b9ee8c7cec21e7d8968217a50c360fe89b6 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 1 Oct 2024 17:47:30 +0300 Subject: [PATCH 09/16] Add a prefetch to try to optimize query performance Signed-off-by: ziadhany Signed-off-by: ziad hany --- 
vulnerabilities/risk.py | 9 +++++---- vulnerabilities/views.py | 1 - 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index 5be6e4ebf..c6a772d40 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -97,9 +97,8 @@ def calculate_vulnerability_risk(vulnerability: Vulnerability): Risk = min(weighted severity * exploitability, 10) """ - # TODO try to use a prefetch - references = vulnerability.references.all() - severities = vulnerability.severities.all() + references = vulnerability.references.select_related("url", "reference_type") + severities = vulnerability.severities.select_related("reference") exploits = Exploit.objects.filter(vulnerability=vulnerability) weighted_severity = get_weighted_severity(severities) @@ -114,7 +113,9 @@ def calculate_pkg_risk(package: Package): """ result = [] - for pkg_related_vul in PackageRelatedVulnerability.objects.filter(package=package, fix=False): + for pkg_related_vul in PackageRelatedVulnerability.objects.filter( + package=package, fix=False + ).prefetch_related("vulnerability"): if pkg_related_vul: risk = calculate_vulnerability_risk(pkg_related_vul.vulnerability) result.append(risk) diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 75b510cdc..30217919f 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -124,7 +124,6 @@ def get_context_data(self, **kwargs): context["package_search_form"] = PackageSearchForm(self.request.GET) context["fixed_package_details"] = package.fixed_package_details context["risk"] = calculate_pkg_risk(package) - context["history"] = list(package.history) return context From 74a16ffed760064bb2251bc8eca5b98265291b78 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 1 Oct 2024 18:58:26 +0300 Subject: [PATCH 10/16] Empty risk when there is no data Signed-off-by: ziadhany Signed-off-by: ziad hany --- vulnerabilities/risk.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index c6a772d40..d68a2b3e1 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -97,13 +97,13 @@ def calculate_vulnerability_risk(vulnerability: Vulnerability): Risk = min(weighted severity * exploitability, 10) """ - references = vulnerability.references.select_related("url", "reference_type") + references = vulnerability.references severities = vulnerability.severities.select_related("reference") exploits = Exploit.objects.filter(vulnerability=vulnerability) - - weighted_severity = get_weighted_severity(severities) - exploitability = get_exploitability_level(exploits, references, severities) - return min(weighted_severity * exploitability, 10) + if references.exists() or severities.exists() or exploits.exists(): + weighted_severity = get_weighted_severity(severities) + exploitability = get_exploitability_level(exploits, references, severities) + return min(weighted_severity * exploitability, 10) def calculate_pkg_risk(package: Package): @@ -118,6 +118,8 @@ def calculate_pkg_risk(package: Package): ).prefetch_related("vulnerability"): if pkg_related_vul: risk = calculate_vulnerability_risk(pkg_related_vul.vulnerability) + if not risk: + continue result.append(risk) if not result: From 8a637fc724f2c232beea5dd7fc0334663bf59443 Mon Sep 17 00:00:00 2001 From: ziad hany Date: Tue, 22 Oct 2024 09:45:07 +0300 Subject: [PATCH 11/16] Create a pipeline for package risk Signed-off-by: ziadhany Signed-off-by: ziad hany --- vulnerabilities/api.py | 1 + 
vulnerabilities/improvers/__init__.py | 44 ++++++++++--------- .../migrations/0074_package_risk.py | 23 ++++++++++ vulnerabilities/models.py | 8 ++++ vulnerabilities/pipelines/risk_package.py | 30 +++++++++++++ vulnerabilities/risk.py | 33 +++++++------- .../templates/package_details.html | 4 +- .../tests/pipelines/test_risk_pipeline.py | 24 ++++++++++ vulnerabilities/tests/test_api.py | 1 + vulnerabilities/views.py | 1 - weight_config.json | 5 +-- 11 files changed, 131 insertions(+), 43 deletions(-) create mode 100644 vulnerabilities/migrations/0074_package_risk.py create mode 100644 vulnerabilities/pipelines/risk_package.py create mode 100644 vulnerabilities/tests/pipelines/test_risk_pipeline.py diff --git a/vulnerabilities/api.py b/vulnerabilities/api.py index e7fc33460..c4ca03447 100644 --- a/vulnerabilities/api.py +++ b/vulnerabilities/api.py @@ -359,6 +359,7 @@ class Meta: "latest_non_vulnerable_version", "affected_by_vulnerabilities", "fixing_vulnerabilities", + "risk", ] diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index 6e9c24b38..f0ef42b98 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -14,29 +14,31 @@ from vulnerabilities.pipelines import enhance_with_kev from vulnerabilities.pipelines import enhance_with_metasploit from vulnerabilities.pipelines import flag_ghost_packages +from vulnerabilities.pipelines import risk_package IMPROVERS_REGISTRY = [ - valid_versions.GitHubBasicImprover, - valid_versions.GitLabBasicImprover, - valid_versions.NginxBasicImprover, - valid_versions.ApacheHTTPDImprover, - valid_versions.DebianBasicImprover, - valid_versions.NpmImprover, - valid_versions.ElixirImprover, - valid_versions.ApacheTomcatImprover, - valid_versions.ApacheKafkaImprover, - valid_versions.IstioImprover, - valid_versions.DebianOvalImprover, - valid_versions.UbuntuOvalImprover, - valid_versions.OSSFuzzImprover, - valid_versions.RubyImprover, - valid_versions.GithubOSVImprover, - vulnerability_status.VulnerabilityStatusImprover, - valid_versions.CurlImprover, - flag_ghost_packages.FlagGhostPackagePipeline, - enhance_with_kev.VulnerabilityKevPipeline, - enhance_with_metasploit.MetasploitImproverPipeline, - enhance_with_exploitdb.ExploitDBImproverPipeline, + # valid_versions.GitHubBasicImprover, + # valid_versions.GitLabBasicImprover, + # valid_versions.NginxBasicImprover, + # valid_versions.ApacheHTTPDImprover, + # valid_versions.DebianBasicImprover, + # valid_versions.NpmImprover, + # valid_versions.ElixirImprover, + # valid_versions.ApacheTomcatImprover, + # valid_versions.ApacheKafkaImprover, + # valid_versions.IstioImprover, + # valid_versions.DebianOvalImprover, + # valid_versions.UbuntuOvalImprover, + # valid_versions.OSSFuzzImprover, + # valid_versions.RubyImprover, + # valid_versions.GithubOSVImprover, + # vulnerability_status.VulnerabilityStatusImprover, + # valid_versions.CurlImprover, + # flag_ghost_packages.FlagGhostPackagePipeline, + # enhance_with_kev.VulnerabilityKevPipeline, + # enhance_with_metasploit.MetasploitImproverPipeline, + # enhance_with_exploitdb.ExploitDBImproverPipeline, + risk_package.RiskPackagePipeline, ] IMPROVERS_REGISTRY = { diff --git a/vulnerabilities/migrations/0074_package_risk.py b/vulnerabilities/migrations/0074_package_risk.py new file mode 100644 index 000000000..91ce9a0ea --- /dev/null +++ b/vulnerabilities/migrations/0074_package_risk.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.16 on 2024-10-22 06:49 + +from django.db import migrations, models 
+ + +class Migration(migrations.Migration): + + dependencies = [ + ("vulnerabilities", "0073_delete_packagerelatedvulnerability"), + ] + + operations = [ + migrations.AddField( + model_name="package", + name="risk", + field=models.DecimalField( + decimal_places=2, + help_text="Enter a risk score between 0.00 and 10.00, where higher values indicate greater vulnerability risk for the package.", + max_digits=4, + null=True, + ), + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 03ee82d1f..7726bfa79 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -636,6 +636,14 @@ class Package(PackageURLMixin): help_text="True if the package does not exist in the upstream package manager or its repository.", ) + risk = models.DecimalField( + null=True, + max_digits=4, + decimal_places=2, + help_text="Enter a risk score between 0.00 and 10.00, where higher values " + "indicate greater vulnerability risk for the package.", + ) + objects = PackageQuerySet.as_manager() def save(self, *args, **kwargs): diff --git a/vulnerabilities/pipelines/risk_package.py b/vulnerabilities/pipelines/risk_package.py new file mode 100644 index 000000000..95db46d2a --- /dev/null +++ b/vulnerabilities/pipelines/risk_package.py @@ -0,0 +1,30 @@ +from vulnerabilities.models import Package +from vulnerabilities.pipelines import VulnerableCodePipeline +from vulnerabilities.risk import calculate_pkg_risk + + +class RiskPackagePipeline(VulnerableCodePipeline): + """ + Risk Assessment Pipeline for Package Vulnerabilities: Iterate through the packages and evaluate their associated risk. + """ + + pipeline_id = "risk_package" + license_expression = None + + @classmethod + def steps(cls): + return (cls.add_risk_package,) + + def add_risk_package(self): + self.log(f"Add risk package pipeline ") + + updatables = [] + for pkg in Package.objects.filter(affected_by_vulnerabilities__isnull=False): + risk = calculate_pkg_risk(pkg) + pkg.risk = risk + updatables.append(pkg) + + # Bulk update the 'risk' field for all packages + Package.objects.bulk_update(objs=updatables, fields=["risk"], batch_size=1000) + + self.log(f"Successfully added risk package pipeline ") diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index d68a2b3e1..de00132eb 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -1,15 +1,16 @@ import os -import re +from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Exploit from vulnerabilities.models import Package -from vulnerabilities.models import PackageRelatedVulnerability from vulnerabilities.models import Vulnerability from vulnerabilities.models import VulnerabilityReference from vulnerabilities.severity_systems import EPSS from vulnerabilities.utils import load_json BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +WEIGHT_CONFIG_PATH = os.path.join(BASE_DIR, "../weight_config.json") +DEFAULT_WEIGHT = 1 def get_weighted_severity(severities): @@ -18,8 +19,8 @@ def get_weighted_severity(severities): by its associated Weight/10. 
Example of Weighted Severity: max(7*(10/10), 8*(3/10), 6*(8/10)) = 7 """ - weight_config_path = os.path.join(BASE_DIR, "..", "weight_config.json") - weight_config = load_json(weight_config_path) + + weight_config = load_json(WEIGHT_CONFIG_PATH) score_map = { "low": 3, @@ -33,11 +34,12 @@ def get_weighted_severity(severities): score_list = [] for severity in severities: - weights = [ - value - for regex_key, value in weight_config.items() - if re.match(regex_key, severity.reference.url) - ] + weights = [] + for key, value in weight_config.items(): + if severity.reference.url.startswith(key): + weights.append(value) + continue + weights.append(DEFAULT_WEIGHT) if not weights: return 0 @@ -113,14 +115,13 @@ def calculate_pkg_risk(package: Package): """ result = [] - for pkg_related_vul in PackageRelatedVulnerability.objects.filter( - package=package, fix=False + for pkg_related_vul in AffectedByPackageRelatedVulnerability.objects.filter( + package=package ).prefetch_related("vulnerability"): - if pkg_related_vul: - risk = calculate_vulnerability_risk(pkg_related_vul.vulnerability) - if not risk: - continue - result.append(risk) + risk = calculate_vulnerability_risk(pkg_related_vul.vulnerability) + if not risk: + continue + result.append(risk) if not result: return diff --git a/vulnerabilities/templates/package_details.html b/vulnerabilities/templates/package_details.html index 85cb10cdd..ee7323287 100644 --- a/vulnerabilities/templates/package_details.html +++ b/vulnerabilities/templates/package_details.html @@ -117,8 +117,8 @@ Risk - {% if risk %} - {{ risk }} + {% if package.risk %} + {{ package.risk }} {% endif %} diff --git a/vulnerabilities/tests/pipelines/test_risk_pipeline.py b/vulnerabilities/tests/pipelines/test_risk_pipeline.py new file mode 100644 index 000000000..f65b8910a --- /dev/null +++ b/vulnerabilities/tests/pipelines/test_risk_pipeline.py @@ -0,0 +1,24 @@ +import pytest + +from vulnerabilities.models import AffectedByPackageRelatedVulnerability +from vulnerabilities.models import Package +from vulnerabilities.pipelines.risk_package import RiskPackagePipeline +from vulnerabilities.tests.test_risk import vulnerability + + +@pytest.mark.django_db +def test_simple_risk_pipeline(vulnerability): + pkg = Package.objects.create(type="pypi", name="foo", version="2.3.0") + assert Package.objects.count() == 1 + + improver = RiskPackagePipeline() + improver.execute() + + assert pkg.risk is None + + AffectedByPackageRelatedVulnerability.objects.create(package=pkg, vulnerability=vulnerability) + improver = RiskPackagePipeline() + improver.execute() + + pkg = Package.objects.get(type="pypi", name="foo", version="2.3.0") + assert str(pkg.risk) == str(3.11) diff --git a/vulnerabilities/tests/test_api.py b/vulnerabilities/tests/test_api.py index cbb018673..9a982e0b4 100644 --- a/vulnerabilities/tests/test_api.py +++ b/vulnerabilities/tests/test_api.py @@ -625,6 +625,7 @@ def test_api_with_lesser_and_greater_fixed_by_packages(self): } ], "resource_url": "http://testserver/packages/pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.1", + "risk": None, } assert response == expected diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index 30217919f..d0a58effe 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -123,7 +123,6 @@ def get_context_data(self, **kwargs): context["fixing_vulnerabilities"] = package.fixing.order_by("vulnerability_id") context["package_search_form"] = PackageSearchForm(self.request.GET) context["fixed_package_details"] = 
package.fixed_package_details - context["risk"] = calculate_pkg_risk(package) context["history"] = list(package.history) return context diff --git a/weight_config.json b/weight_config.json index 7dd69c4ae..8951dad03 100644 --- a/weight_config.json +++ b/weight_config.json @@ -1,5 +1,4 @@ { - "https://nvd\\.nist\\.gov/.*": 9, - "https:\\/\\/security-tracker\\.debian\\.org\\/.*": 9, - "^(?:http|ftp)s?://": 1 + "https://nvd.nist.gov/": 9, + "https://security-tracker.debian.org/": 9 } \ No newline at end of file From 848a5f56c5791b767e97878f4a3b9a8d536a8a92 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 22 Oct 2024 16:23:21 +0300 Subject: [PATCH 12/16] Load the weight once uncomment all importers Signed-off-by: ziad hany --- vulnerabilities/improvers/__init__.py | 42 +++++++++++++-------------- vulnerabilities/risk.py | 5 ++-- 2 files changed, 23 insertions(+), 24 deletions(-) diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index f0ef42b98..1885473bf 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -17,27 +17,27 @@ from vulnerabilities.pipelines import risk_package IMPROVERS_REGISTRY = [ - # valid_versions.GitHubBasicImprover, - # valid_versions.GitLabBasicImprover, - # valid_versions.NginxBasicImprover, - # valid_versions.ApacheHTTPDImprover, - # valid_versions.DebianBasicImprover, - # valid_versions.NpmImprover, - # valid_versions.ElixirImprover, - # valid_versions.ApacheTomcatImprover, - # valid_versions.ApacheKafkaImprover, - # valid_versions.IstioImprover, - # valid_versions.DebianOvalImprover, - # valid_versions.UbuntuOvalImprover, - # valid_versions.OSSFuzzImprover, - # valid_versions.RubyImprover, - # valid_versions.GithubOSVImprover, - # vulnerability_status.VulnerabilityStatusImprover, - # valid_versions.CurlImprover, - # flag_ghost_packages.FlagGhostPackagePipeline, - # enhance_with_kev.VulnerabilityKevPipeline, - # enhance_with_metasploit.MetasploitImproverPipeline, - # enhance_with_exploitdb.ExploitDBImproverPipeline, + valid_versions.GitHubBasicImprover, + valid_versions.GitLabBasicImprover, + valid_versions.NginxBasicImprover, + valid_versions.ApacheHTTPDImprover, + valid_versions.DebianBasicImprover, + valid_versions.NpmImprover, + valid_versions.ElixirImprover, + valid_versions.ApacheTomcatImprover, + valid_versions.ApacheKafkaImprover, + valid_versions.IstioImprover, + valid_versions.DebianOvalImprover, + valid_versions.UbuntuOvalImprover, + valid_versions.OSSFuzzImprover, + valid_versions.RubyImprover, + valid_versions.GithubOSVImprover, + vulnerability_status.VulnerabilityStatusImprover, + valid_versions.CurlImprover, + flag_ghost_packages.FlagGhostPackagePipeline, + enhance_with_kev.VulnerabilityKevPipeline, + enhance_with_metasploit.MetasploitImproverPipeline, + enhance_with_exploitdb.ExploitDBImproverPipeline, risk_package.RiskPackagePipeline, ] diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index de00132eb..ba656e4ba 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -11,6 +11,7 @@ BASE_DIR = os.path.dirname(os.path.abspath(__file__)) WEIGHT_CONFIG_PATH = os.path.join(BASE_DIR, "../weight_config.json") DEFAULT_WEIGHT = 1 +WEIGHT_CONFIG = load_json(WEIGHT_CONFIG_PATH) def get_weighted_severity(severities): @@ -20,8 +21,6 @@ def get_weighted_severity(severities): Example of Weighted Severity: max(7*(10/10), 8*(3/10), 6*(8/10)) = 7 """ - weight_config = load_json(WEIGHT_CONFIG_PATH) - score_map = { "low": 3, "moderate": 6.9, @@ -35,7 +34,7 @@ 
def get_weighted_severity(severities): score_list = [] for severity in severities: weights = [] - for key, value in weight_config.items(): + for key, value in WEIGHT_CONFIG.items(): if severity.reference.url.startswith(key): weights.append(value) continue From 185eb0e5bda8fceebbe8be4e9200762bcae928ee Mon Sep 17 00:00:00 2001 From: ziadhany Date: Mon, 28 Oct 2024 17:46:40 +0300 Subject: [PATCH 13/16] Update the risk description in the model. Rename the pipeline from RiskPackagePipeline to ComputePackageRiskPipeline. Add a tooltip for risk, and remove any unused imports in the view. Signed-off-by: ziad hany --- vulnerabilities/improvers/__init__.py | 2 +- .../migrations/0074_package_risk.py | 4 +-- vulnerabilities/models.py | 2 +- vulnerabilities/pipelines/risk_package.py | 36 +++++++++++++++---- vulnerabilities/risk.py | 11 +++--- .../templates/package_details.html | 6 +++- .../tests/pipelines/test_risk_pipeline.py | 6 ++-- vulnerabilities/views.py | 1 - 8 files changed, 45 insertions(+), 23 deletions(-) diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index 1885473bf..0f14f4854 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -38,7 +38,7 @@ enhance_with_kev.VulnerabilityKevPipeline, enhance_with_metasploit.MetasploitImproverPipeline, enhance_with_exploitdb.ExploitDBImproverPipeline, - risk_package.RiskPackagePipeline, + risk_package.ComputePackageRiskPipeline, ] IMPROVERS_REGISTRY = { diff --git a/vulnerabilities/migrations/0074_package_risk.py b/vulnerabilities/migrations/0074_package_risk.py index 91ce9a0ea..6dd8f5db0 100644 --- a/vulnerabilities/migrations/0074_package_risk.py +++ b/vulnerabilities/migrations/0074_package_risk.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.16 on 2024-10-22 06:49 +# Generated by Django 4.2.16 on 2024-10-28 13:02 from django.db import migrations, models @@ -15,7 +15,7 @@ class Migration(migrations.Migration): name="risk", field=models.DecimalField( decimal_places=2, - help_text="Enter a risk score between 0.00 and 10.00, where higher values indicate greater vulnerability risk for the package.", + help_text="Risk score between 0.00 and 10.00, where higher values indicate greater vulnerability risk for the package.", max_digits=4, null=True, ), diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 7726bfa79..21f1a94c0 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -640,7 +640,7 @@ class Package(PackageURLMixin): null=True, max_digits=4, decimal_places=2, - help_text="Enter a risk score between 0.00 and 10.00, where higher values " + help_text="Risk score between 0.00 and 10.00, where higher values " "indicate greater vulnerability risk for the package.", ) diff --git a/vulnerabilities/pipelines/risk_package.py b/vulnerabilities/pipelines/risk_package.py index 95db46d2a..0b05655de 100644 --- a/vulnerabilities/pipelines/risk_package.py +++ b/vulnerabilities/pipelines/risk_package.py @@ -1,14 +1,16 @@ +from aboutcode.pipeline import LoopProgress + from vulnerabilities.models import Package from vulnerabilities.pipelines import VulnerableCodePipeline from vulnerabilities.risk import calculate_pkg_risk -class RiskPackagePipeline(VulnerableCodePipeline): +class ComputePackageRiskPipeline(VulnerableCodePipeline): """ Risk Assessment Pipeline for Package Vulnerabilities: Iterate through the packages and evaluate their associated risk. 
""" - pipeline_id = "risk_package" + pipeline_id = "compute_package_risk" license_expression = None @classmethod @@ -16,15 +18,35 @@ def steps(cls): return (cls.add_risk_package,) def add_risk_package(self): - self.log(f"Add risk package pipeline ") + affected_pkgs = Package.objects.filter(affected_by_vulnerabilities__isnull=False).distinct() + + self.log(f"Calculating risk for {affected_pkgs.count():,d} affected package records") + + progress = LoopProgress(total_iterations=affected_pkgs.count(), logger=self.log) updatables = [] - for pkg in Package.objects.filter(affected_by_vulnerabilities__isnull=False): + updated_package_count = 0 + batch_size = 1000 + + for pkg in progress.iter(affected_pkgs): risk = calculate_pkg_risk(pkg) pkg.risk = risk updatables.append(pkg) - # Bulk update the 'risk' field for all packages - Package.objects.bulk_update(objs=updatables, fields=["risk"], batch_size=1000) + if len(updatables) >= batch_size: + try: + Package.objects.bulk_update(objs=updatables, fields=["risk"]) + updated_package_count += len(updatables) + except Exception as e: + self.log(f"Error updating packages: {e}") + + updatables.clear() + + if updatables: + try: + Package.objects.bulk_update(objs=updatables, fields=["risk"]) + updated_package_count += len(updatables) + except Exception as e: + self.log(f"Error updating remaining packages: {e}") - self.log(f"Successfully added risk package pipeline ") + self.log(f"Successfully added risk score for {updated_package_count:,d} package") diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index ba656e4ba..df38b0372 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -1,4 +1,4 @@ -import os +from pathlib import Path from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Exploit @@ -8,9 +8,8 @@ from vulnerabilities.severity_systems import EPSS from vulnerabilities.utils import load_json -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) -WEIGHT_CONFIG_PATH = os.path.join(BASE_DIR, "../weight_config.json") DEFAULT_WEIGHT = 1 +WEIGHT_CONFIG_PATH = Path(__file__).parent.parent / "weight_config.json" WEIGHT_CONFIG = load_json(WEIGHT_CONFIG_PATH) @@ -117,10 +116,8 @@ def calculate_pkg_risk(package: Package): for pkg_related_vul in AffectedByPackageRelatedVulnerability.objects.filter( package=package ).prefetch_related("vulnerability"): - risk = calculate_vulnerability_risk(pkg_related_vul.vulnerability) - if not risk: - continue - result.append(risk) + if risk := calculate_vulnerability_risk(pkg_related_vul.vulnerability): + result.append(risk) if not result: return diff --git a/vulnerabilities/templates/package_details.html b/vulnerabilities/templates/package_details.html index ee7323287..20480b943 100644 --- a/vulnerabilities/templates/package_details.html +++ b/vulnerabilities/templates/package_details.html @@ -114,7 +114,11 @@ - Risk + + Risk + {% if package.risk %} diff --git a/vulnerabilities/tests/pipelines/test_risk_pipeline.py b/vulnerabilities/tests/pipelines/test_risk_pipeline.py index f65b8910a..b69cb626a 100644 --- a/vulnerabilities/tests/pipelines/test_risk_pipeline.py +++ b/vulnerabilities/tests/pipelines/test_risk_pipeline.py @@ -2,7 +2,7 @@ from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Package -from vulnerabilities.pipelines.risk_package import RiskPackagePipeline +from vulnerabilities.pipelines.risk_package import ComputePackageRiskPipeline from vulnerabilities.tests.test_risk import 
vulnerability @@ -11,13 +11,13 @@ def test_simple_risk_pipeline(vulnerability): pkg = Package.objects.create(type="pypi", name="foo", version="2.3.0") assert Package.objects.count() == 1 - improver = RiskPackagePipeline() + improver = ComputePackageRiskPipeline() improver.execute() assert pkg.risk is None AffectedByPackageRelatedVulnerability.objects.create(package=pkg, vulnerability=vulnerability) - improver = RiskPackagePipeline() + improver = ComputePackageRiskPipeline() improver.execute() pkg = Package.objects.get(type="pypi", name="foo", version="2.3.0") diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index d0a58effe..d5e568701 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -31,7 +31,6 @@ from vulnerabilities.forms import PackageSearchForm from vulnerabilities.forms import VulnerabilitySearchForm from vulnerabilities.models import VulnerabilityStatusType -from vulnerabilities.risk import calculate_pkg_risk from vulnerabilities.severity_systems import EPSS from vulnerabilities.severity_systems import SCORING_SYSTEMS from vulnerabilities.utils import get_severity_range From 45c62b5051625d82a95cef3443b14572597fd055 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Mon, 28 Oct 2024 18:01:03 +0300 Subject: [PATCH 14/16] Rename the pipeline step from add_risk_package to add_package_risk_score and remove any extra whitespace in views.py. Signed-off-by: ziad hany --- vulnerabilities/pipelines/risk_package.py | 4 ++-- vulnerabilities/views.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/vulnerabilities/pipelines/risk_package.py b/vulnerabilities/pipelines/risk_package.py index 0b05655de..46d5f12ba 100644 --- a/vulnerabilities/pipelines/risk_package.py +++ b/vulnerabilities/pipelines/risk_package.py @@ -15,9 +15,9 @@ class ComputePackageRiskPipeline(VulnerableCodePipeline): @classmethod def steps(cls): - return (cls.add_risk_package,) + return (cls.add_package_risk_score,) - def add_risk_package(self): + def add_package_risk_score(self): affected_pkgs = Package.objects.filter(affected_by_vulnerabilities__isnull=False).distinct() self.log(f"Calculating risk for {affected_pkgs.count():,d} affected package records") diff --git a/vulnerabilities/views.py b/vulnerabilities/views.py index e1b259d91..51cdcd049 100644 --- a/vulnerabilities/views.py +++ b/vulnerabilities/views.py @@ -122,6 +122,7 @@ def get_context_data(self, **kwargs): context["fixing_vulnerabilities"] = package.fixing.order_by("vulnerability_id") context["package_search_form"] = PackageSearchForm(self.request.GET) context["fixed_package_details"] = package.fixed_package_details + context["history"] = list(package.history) return context From 0af9432bb0114f9c8c6ae96c774c53dd1e52ef6b Mon Sep 17 00:00:00 2001 From: ziadhany Date: Mon, 28 Oct 2024 18:03:52 +0300 Subject: [PATCH 15/16] Resolve migration conflict Signed-off-by: ziad hany --- .../migrations/{0074_package_risk.py => 0075_package_risk.py} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename vulnerabilities/migrations/{0074_package_risk.py => 0075_package_risk.py} (81%) diff --git a/vulnerabilities/migrations/0074_package_risk.py b/vulnerabilities/migrations/0075_package_risk.py similarity index 81% rename from vulnerabilities/migrations/0074_package_risk.py rename to vulnerabilities/migrations/0075_package_risk.py index 6dd8f5db0..5ffe849ef 100644 --- a/vulnerabilities/migrations/0074_package_risk.py +++ b/vulnerabilities/migrations/0075_package_risk.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.16 on 
2024-10-28 13:02 +# Generated by Django 4.2.16 on 2024-10-28 15:02 from django.db import migrations, models @@ -6,7 +6,7 @@ class Migration(migrations.Migration): dependencies = [ - ("vulnerabilities", "0073_delete_packagerelatedvulnerability"), + ("vulnerabilities", "0074_update_pysec_advisory_created_by"), ] operations = [ From 0e114ac059e1cb768e19a922c6bbd14f31643f14 Mon Sep 17 00:00:00 2001 From: ziadhany Date: Tue, 29 Oct 2024 14:47:52 +0300 Subject: [PATCH 16/16] Rename the pipeline file Add pagination and refactor bulk_update_package Signed-off-by: ziad hany --- vulnerabilities/api.py | 2 +- vulnerabilities/improvers/__init__.py | 4 +- ...age_risk.py => 0075_package_risk_score.py} | 4 +- vulnerabilities/models.py | 2 +- .../pipelines/compute_package_risk.py | 59 +++++++++++++++++++ vulnerabilities/pipelines/risk_package.py | 52 ---------------- vulnerabilities/risk.py | 6 +- .../templates/package_details.html | 4 +- ...peline.py => test_compute_package_risk.py} | 6 +- vulnerabilities/tests/test_api.py | 2 +- vulnerabilities/tests/test_risk.py | 6 +- 11 files changed, 77 insertions(+), 70 deletions(-) rename vulnerabilities/migrations/{0075_package_risk.py => 0075_package_risk_score.py} (87%) create mode 100644 vulnerabilities/pipelines/compute_package_risk.py delete mode 100644 vulnerabilities/pipelines/risk_package.py rename vulnerabilities/tests/pipelines/{test_risk_pipeline.py => test_compute_package_risk.py} (80%) diff --git a/vulnerabilities/api.py b/vulnerabilities/api.py index c285055b2..b8bb703a6 100644 --- a/vulnerabilities/api.py +++ b/vulnerabilities/api.py @@ -359,7 +359,7 @@ class Meta: "latest_non_vulnerable_version", "affected_by_vulnerabilities", "fixing_vulnerabilities", - "risk", + "risk_score", ] diff --git a/vulnerabilities/improvers/__init__.py b/vulnerabilities/improvers/__init__.py index 7241055eb..fd18fb28c 100644 --- a/vulnerabilities/improvers/__init__.py +++ b/vulnerabilities/improvers/__init__.py @@ -10,11 +10,11 @@ from vulnerabilities.improvers import valid_versions from vulnerabilities.improvers import vulnerability_status from vulnerabilities.pipelines import VulnerableCodePipeline +from vulnerabilities.pipelines import compute_package_risk from vulnerabilities.pipelines import enhance_with_exploitdb from vulnerabilities.pipelines import enhance_with_kev from vulnerabilities.pipelines import enhance_with_metasploit from vulnerabilities.pipelines import flag_ghost_packages -from vulnerabilities.pipelines import risk_package IMPROVERS_REGISTRY = [ valid_versions.GitHubBasicImprover, @@ -38,7 +38,7 @@ enhance_with_kev.VulnerabilityKevPipeline, enhance_with_metasploit.MetasploitImproverPipeline, enhance_with_exploitdb.ExploitDBImproverPipeline, - risk_package.ComputePackageRiskPipeline, + compute_package_risk.ComputePackageRiskPipeline, ] IMPROVERS_REGISTRY = { diff --git a/vulnerabilities/migrations/0075_package_risk.py b/vulnerabilities/migrations/0075_package_risk_score.py similarity index 87% rename from vulnerabilities/migrations/0075_package_risk.py rename to vulnerabilities/migrations/0075_package_risk_score.py index 5ffe849ef..72827ae63 100644 --- a/vulnerabilities/migrations/0075_package_risk.py +++ b/vulnerabilities/migrations/0075_package_risk_score.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.16 on 2024-10-28 15:02 +# Generated by Django 4.2.16 on 2024-10-29 10:55 from django.db import migrations, models @@ -12,7 +12,7 @@ class Migration(migrations.Migration): operations = [ migrations.AddField( model_name="package", - name="risk", + 
name="risk_score", field=models.DecimalField( decimal_places=2, help_text="Risk score between 0.00 and 10.00, where higher values indicate greater vulnerability risk for the package.", diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index e410121ec..b95a07297 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -636,7 +636,7 @@ class Package(PackageURLMixin): help_text="True if the package does not exist in the upstream package manager or its repository.", ) - risk = models.DecimalField( + risk_score = models.DecimalField( null=True, max_digits=4, decimal_places=2, diff --git a/vulnerabilities/pipelines/compute_package_risk.py b/vulnerabilities/pipelines/compute_package_risk.py new file mode 100644 index 000000000..525da599d --- /dev/null +++ b/vulnerabilities/pipelines/compute_package_risk.py @@ -0,0 +1,59 @@ +from aboutcode.pipeline import LoopProgress + +from vulnerabilities.models import Package +from vulnerabilities.pipelines import VulnerableCodePipeline +from vulnerabilities.risk import compute_package_risk + + +class ComputePackageRiskPipeline(VulnerableCodePipeline): + """ + Risk Assessment Pipeline for Package Vulnerabilities: Iterate through the packages and evaluate their associated risk. + """ + + pipeline_id = "compute_package_risk" + license_expression = None + + @classmethod + def steps(cls): + return (cls.add_package_risk_score,) + + def add_package_risk_score(self): + affected_packages = Package.objects.filter( + affected_by_vulnerabilities__isnull=False + ).distinct() + + self.log(f"Calculating risk for {affected_packages.count():,d} affected package records") + + progress = LoopProgress(total_iterations=affected_packages.count(), logger=self.log) + + updatables = [] + updated_package_count = 0 + batch_size = 5000 + + for package in progress.iter(affected_packages.paginated()): + risk_score = compute_package_risk(package) + package.risk_score = risk_score + updatables.append(package) + + if len(updatables) >= batch_size: + updated_package_count += bulk_update_package_risk_score( + packages=updatables, + logger=self.log, + ) + updated_package_count += bulk_update_package_risk_score( + packages=updatables, + logger=self.log, + ) + self.log(f"Successfully added risk score for {updated_package_count:,d} package") + + +def bulk_update_package_risk_score(packages, logger): + package_count = 0 + if packages: + try: + Package.objects.bulk_update(objs=packages, fields=["risk_score"]) + package_count += len(packages) + except Exception as e: + logger(f"Error updating packages: {e}") + packages.clear() + return package_count diff --git a/vulnerabilities/pipelines/risk_package.py b/vulnerabilities/pipelines/risk_package.py deleted file mode 100644 index 46d5f12ba..000000000 --- a/vulnerabilities/pipelines/risk_package.py +++ /dev/null @@ -1,52 +0,0 @@ -from aboutcode.pipeline import LoopProgress - -from vulnerabilities.models import Package -from vulnerabilities.pipelines import VulnerableCodePipeline -from vulnerabilities.risk import calculate_pkg_risk - - -class ComputePackageRiskPipeline(VulnerableCodePipeline): - """ - Risk Assessment Pipeline for Package Vulnerabilities: Iterate through the packages and evaluate their associated risk. 
- """ - - pipeline_id = "compute_package_risk" - license_expression = None - - @classmethod - def steps(cls): - return (cls.add_package_risk_score,) - - def add_package_risk_score(self): - affected_pkgs = Package.objects.filter(affected_by_vulnerabilities__isnull=False).distinct() - - self.log(f"Calculating risk for {affected_pkgs.count():,d} affected package records") - - progress = LoopProgress(total_iterations=affected_pkgs.count(), logger=self.log) - - updatables = [] - updated_package_count = 0 - batch_size = 1000 - - for pkg in progress.iter(affected_pkgs): - risk = calculate_pkg_risk(pkg) - pkg.risk = risk - updatables.append(pkg) - - if len(updatables) >= batch_size: - try: - Package.objects.bulk_update(objs=updatables, fields=["risk"]) - updated_package_count += len(updatables) - except Exception as e: - self.log(f"Error updating packages: {e}") - - updatables.clear() - - if updatables: - try: - Package.objects.bulk_update(objs=updatables, fields=["risk"]) - updated_package_count += len(updatables) - except Exception as e: - self.log(f"Error updating remaining packages: {e}") - - self.log(f"Successfully added risk score for {updated_package_count:,d} package") diff --git a/vulnerabilities/risk.py b/vulnerabilities/risk.py index df38b0372..f3b225189 100644 --- a/vulnerabilities/risk.py +++ b/vulnerabilities/risk.py @@ -89,7 +89,7 @@ def get_exploitability_level(exploits, references, severities): return exploit_level -def calculate_vulnerability_risk(vulnerability: Vulnerability): +def compute_vulnerability_risk(vulnerability: Vulnerability): """ Risk may be expressed as a number ranging from 0 to 10. Risk is calculated from weighted severity and exploitability values. @@ -106,7 +106,7 @@ def calculate_vulnerability_risk(vulnerability: Vulnerability): return min(weighted_severity * exploitability, 10) -def calculate_pkg_risk(package: Package): +def compute_package_risk(package: Package): """ Calculate the risk for a package by iterating over all vulnerabilities that affects this package and determining the associated risk. 
@@ -116,7 +116,7 @@ def calculate_pkg_risk(package: Package): for pkg_related_vul in AffectedByPackageRelatedVulnerability.objects.filter( package=package ).prefetch_related("vulnerability"): - if risk := calculate_vulnerability_risk(pkg_related_vul.vulnerability): + if risk := compute_vulnerability_risk(pkg_related_vul.vulnerability): result.append(risk) if not result: diff --git a/vulnerabilities/templates/package_details.html b/vulnerabilities/templates/package_details.html index 20480b943..dd72d0500 100644 --- a/vulnerabilities/templates/package_details.html +++ b/vulnerabilities/templates/package_details.html @@ -121,8 +121,8 @@ - {% if package.risk %} - {{ package.risk }} + {% if package.risk_score %} + {{ package.risk_score }} {% endif %} diff --git a/vulnerabilities/tests/pipelines/test_risk_pipeline.py b/vulnerabilities/tests/pipelines/test_compute_package_risk.py similarity index 80% rename from vulnerabilities/tests/pipelines/test_risk_pipeline.py rename to vulnerabilities/tests/pipelines/test_compute_package_risk.py index b69cb626a..e44fa424a 100644 --- a/vulnerabilities/tests/pipelines/test_risk_pipeline.py +++ b/vulnerabilities/tests/pipelines/test_compute_package_risk.py @@ -2,7 +2,7 @@ from vulnerabilities.models import AffectedByPackageRelatedVulnerability from vulnerabilities.models import Package -from vulnerabilities.pipelines.risk_package import ComputePackageRiskPipeline +from vulnerabilities.pipelines.compute_package_risk import ComputePackageRiskPipeline from vulnerabilities.tests.test_risk import vulnerability @@ -14,11 +14,11 @@ def test_simple_risk_pipeline(vulnerability): improver = ComputePackageRiskPipeline() improver.execute() - assert pkg.risk is None + assert pkg.risk_score is None AffectedByPackageRelatedVulnerability.objects.create(package=pkg, vulnerability=vulnerability) improver = ComputePackageRiskPipeline() improver.execute() pkg = Package.objects.get(type="pypi", name="foo", version="2.3.0") - assert str(pkg.risk) == str(3.11) + assert str(pkg.risk_score) == str(3.11) diff --git a/vulnerabilities/tests/test_api.py b/vulnerabilities/tests/test_api.py index b59b2cbcd..926d3c219 100644 --- a/vulnerabilities/tests/test_api.py +++ b/vulnerabilities/tests/test_api.py @@ -625,7 +625,7 @@ def test_api_with_lesser_and_greater_fixed_by_packages(self): } ], "resource_url": "http://testserver/packages/pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.13.1", - "risk": None, + "risk_score": None, } assert response == expected diff --git a/vulnerabilities/tests/test_risk.py b/vulnerabilities/tests/test_risk.py index 30e6e9807..169024eb6 100644 --- a/vulnerabilities/tests/test_risk.py +++ b/vulnerabilities/tests/test_risk.py @@ -6,7 +6,7 @@ from vulnerabilities.models import VulnerabilityRelatedReference from vulnerabilities.models import VulnerabilitySeverity from vulnerabilities.models import Weakness -from vulnerabilities.risk import calculate_vulnerability_risk +from vulnerabilities.risk import compute_vulnerability_risk from vulnerabilities.risk import get_exploitability_level from vulnerabilities.risk import get_weighted_severity from vulnerabilities.severity_systems import CVSSV3 @@ -160,5 +160,5 @@ def test_get_weighted_severity(vulnerability): @pytest.mark.django_db -def test_calculate_vulnerability_risk(vulnerability): - assert calculate_vulnerability_risk(vulnerability) == 3.1050000000000004 +def test_compute_vulnerability_risk(vulnerability): + assert compute_vulnerability_risk(vulnerability) == 3.1050000000000004
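
As a compact usage recap of the final state of this series, mirroring vulnerabilities/tests/pipelines/test_compute_package_risk.py above; the 3.11 value comes from that test's fixture rather than a general constant, and the raw score 3.1050000000000004 is stored as 3.11 because Package.risk_score is a DecimalField with two decimal places:

# Usage recap, mirroring the test shown above.
from vulnerabilities.models import Package
from vulnerabilities.pipelines.compute_package_risk import ComputePackageRiskPipeline

ComputePackageRiskPipeline().execute()   # computes and bulk-updates Package.risk_score
pkg = Package.objects.get(type="pypi", name="foo", version="2.3.0")
print(pkg.risk_score)                    # Decimal("3.11") with the test fixture data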