feature: IANA update

This commit is contained in:
Heiko
2025-12-19 20:10:39 +01:00
parent f038d6a3fc
commit 753c582010
27 changed files with 1923 additions and 419 deletions

View File

@@ -5,7 +5,13 @@ import logging
from .__main__ import main
from .scanner import perform_scan
__version__ = "0.1.0"
try:
from importlib.metadata import version
__version__ = version("compliance-scan")
except Exception:
__version__ = "unknown"
__all__ = ["main", "perform_scan"]
# Configure logging

View File

@@ -4,7 +4,11 @@
import sys
from .cli import parse_arguments
from .commands import handle_report_command, handle_scan_command
from .commands import (
handle_report_command,
handle_scan_command,
handle_update_iana_command,
)
from .output import print_error
@@ -21,6 +25,8 @@ def main() -> int:
return handle_scan_command(args)
if args.command == "report":
return handle_report_command(args)
if args.command == "update-iana":
return handle_update_iana_command(args)
print_error(f"Unknown command: {args.command}")
return 1

View File

@@ -164,6 +164,10 @@ Examples:
compliance-scan scan example.com:443 --print
compliance-scan scan example.com:443,636 -db /path/to/scans.db
compliance-scan scan [2001:db8::1]:443,636 --print
Note:
SSLyze outputs INFO-level log messages during scanning that cannot be suppressed.
These messages are harmless and can be ignored.
""",
)
@@ -247,6 +251,30 @@ Examples:
default=False,
)
# Update-iana subcommand
update_parser = subparsers.add_parser(
"update-iana",
help="Update IANA registry data from official online sources",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
compliance-scan update-iana
compliance-scan update-iana -db /path/to/scans.db
Note:
Default database contains IANA data as of 12/2024.
This command fetches current data from IANA and updates the database.
""",
)
update_parser.add_argument(
"-db",
"--database",
type=str,
help="Database file to update (default: compliance_status.db)",
default="compliance_status.db",
)
args = parser.parse_args()
# Check if no command was provided

View File

@@ -2,5 +2,10 @@
from .report import handle_report_command
from .scan import handle_scan_command
from .update_iana import handle_update_iana_command
__all__ = ["handle_report_command", "handle_scan_command"]
__all__ = [
"handle_report_command",
"handle_scan_command",
"handle_update_iana_command",
]

View File

@@ -1,7 +1,8 @@
"""Scan command handler."""
import argparse
from datetime import datetime, timezone
import sqlite3
from datetime import UTC, datetime
from pathlib import Path
from typing import Any
@@ -58,7 +59,7 @@ def handle_scan_command(args: argparse.Namespace) -> int:
return 1
# Single timestamp for all scans (program start time)
program_start_time = datetime.now(timezone.utc)
program_start_time = datetime.now(UTC)
# Scan results storage
scan_results_dict: dict[int, Any] = {}
@@ -76,7 +77,7 @@ def handle_scan_command(args: argparse.Namespace) -> int:
continue
# Calculate total scan duration
scan_end_time = datetime.now(timezone.utc)
scan_end_time = datetime.now(UTC)
total_scan_duration = (scan_end_time - program_start_time).total_seconds()
# Save all results to database with single scan_id
@@ -105,8 +106,6 @@ def handle_scan_command(args: argparse.Namespace) -> int:
# Print summary if requested
if args.print:
import sqlite3
print("\n" + "=" * 70)
print("SCAN SUMMARY")
print("=" * 70)

View File

@@ -0,0 +1,248 @@
"""Update IANA command handler."""
import argparse
import json
import logging
import sqlite3
from pathlib import Path
from urllib.error import URLError
from urllib.request import urlopen
from ..iana_parser import (
extract_updated_date,
find_registry,
get_table_name_from_filename,
parse_xml_with_namespace_support,
)
from ..iana_validator import (
ValidationError,
normalize_header,
validate_headers,
validate_registry_data,
)
from ..output import print_error
logger = logging.getLogger(__name__)
def fetch_xml_from_url(url: str, timeout: int = 30) -> str:
    """Fetch XML content from a URL and decode it as UTF-8.

    Args:
        url: URL to fetch (the scheme is not validated here; callers pass
            https URLs from the bundled configuration)
        timeout: Timeout in seconds

    Returns:
        XML content as string

    Raises:
        URLError: If URL cannot be fetched
    """
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logger.info("Fetching %s", url)
    # NOTE(review): the response is always decoded as UTF-8; a different
    # charset advertised by the server would not be honored -- IANA serves
    # UTF-8, so this is presumably fine. Confirm if other sources are added.
    with urlopen(url, timeout=timeout) as response:
        return response.read().decode("utf-8")
def calculate_diff(
    old_rows: list[tuple],
    new_rows: list[tuple],
    pk_index: int = 0,
) -> dict[str, list]:
    """Compare two row sets keyed by one primary-key column.

    Args:
        old_rows: Rows currently stored in the database.
        new_rows: Rows freshly parsed from the XML registry.
        pk_index: Index of the column that acts as primary key.

    Returns:
        Dict with keys 'added', 'deleted' and 'modified', each holding a
        list of primary-key values.
    """
    before = {row[pk_index]: row for row in old_rows}
    after = {row[pk_index]: row for row in new_rows}
    return {
        "added": [key for key in after if key not in before],
        "deleted": [key for key in before if key not in after],
        "modified": [
            key for key in after if key in before and before[key] != after[key]
        ],
    }
def process_registry_with_validation(
    xml_content: str,
    registry_id: str,
    table_name: str,
    headers: list[str],
    db_conn: sqlite3.Connection,
    skip_min_rows_check: bool = False,
) -> tuple[int, dict[str, list]]:
    """Process registry with validation and diff calculation.

    Parses *xml_content*, validates the extracted rows, replaces the whole
    contents of *table_name* with the new rows, and reports how the data
    changed. The caller owns the transaction: nothing is committed here.

    Args:
        xml_content: XML content as string
        registry_id: Registry ID to extract
        table_name: Database table name (comes from the bundled JSON config;
            it is interpolated into SQL, so it must never be user input)
        headers: List of column headers
        db_conn: Database connection
        skip_min_rows_check: Skip minimum rows validation (for tests)

    Returns:
        Tuple of (row_count, diff_dict)

    Raises:
        ValidationError: If validation fails
        ValueError: If registry not found
    """
    import tempfile

    # The parser API works on file paths, so the XML string is staged in a
    # temporary file that is removed as soon as parsing finishes.
    with tempfile.NamedTemporaryFile(
        mode="w", suffix=".xml", delete=False, encoding="utf-8"
    ) as tmp_file:
        tmp_file.write(xml_content)
        tmp_path = tmp_file.name
    try:
        root, ns = parse_xml_with_namespace_support(tmp_path)
    finally:
        Path(tmp_path).unlink()

    # Fail fast if the configured headers no longer match the DB schema.
    validate_headers(table_name, headers, db_conn)
    registry = find_registry(root, registry_id, ns)
    # IANA XML may or may not carry a namespace; query accordingly.
    if ns:
        records = registry.findall("iana:record", ns)
    else:
        records = registry.findall("record")

    from ..iana_parser import extract_field_value, is_unassigned

    rows_dict = []
    for record in records:
        # Skip unassigned/reserved range entries (e.g. "42-255").
        if is_unassigned(record, ns):
            continue
        row_dict = {}
        for header in headers:
            normalized_key = normalize_header(header)
            row_dict[normalized_key] = extract_field_value(record, header, ns)
        rows_dict.append(row_dict)

    validate_registry_data(table_name, rows_dict, skip_min_rows_check)

    # Tuple order relies on dict insertion order matching *headers* order
    # (guaranteed since Python 3.7).
    rows = [tuple(row.values()) for row in rows_dict]
    cursor = db_conn.cursor()
    old_rows = cursor.execute(f"SELECT * FROM {table_name}").fetchall()
    diff = calculate_diff(old_rows, rows)

    # Full replace: wipe the table, then insert the validated rows.
    placeholders = ",".join(["?"] * len(headers))
    cursor.execute(f"DELETE FROM {table_name}")
    cursor.executemany(f"INSERT INTO {table_name} VALUES ({placeholders})", rows)
    return len(rows), diff
def handle_update_iana_command(args: argparse.Namespace) -> int:
    """Handle the update-iana subcommand.

    Downloads every IANA XML registry listed in data/iana_parse.json,
    validates the parsed rows and replaces the corresponding tables in the
    target database inside a single transaction. Any fetch, validation or
    database error rolls the transaction back, leaving the database
    untouched.

    Args:
        args: Parsed arguments (uses ``args.database``)

    Returns:
        Exit code (0 for success, 1 for error)
    """
    logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")

    db_path = args.database
    if not Path(db_path).exists():
        print_error(f"Database not found: {db_path}")
        return 1

    # The JSON config ships with the package, next to the package's data dir.
    script_dir = Path(__file__).parent.parent
    config_path = script_dir / "data" / "iana_parse.json"
    logger.info(f"Loading configuration from {config_path}")
    try:
        with config_path.open(encoding="utf-8") as f:
            config = json.load(f)
    except (FileNotFoundError, json.JSONDecodeError, OSError) as e:
        print_error(f"Error loading configuration: {e}")
        return 1

    try:
        conn = sqlite3.connect(str(db_path))
    except sqlite3.Error as e:
        print_error(f"Error opening database: {e}")
        return 1

    logger.info("Starting IANA registry update")
    try:
        # One transaction for the whole run: either every registry table is
        # updated, or none of them is.
        conn.execute("BEGIN TRANSACTION")
        total_registries = 0
        total_rows = 0
        for url, registries in config.items():
            try:
                xml_content = fetch_xml_from_url(url)
            except (URLError, OSError) as e:
                print_error(f"Failed to fetch {url}: {e}")
                conn.rollback()
                conn.close()
                return 1

            xml_date = extract_updated_date(xml_content)
            logger.info(f"XML data date: {xml_date}")

            for registry_id, output_filename, headers in registries:
                table_name = get_table_name_from_filename(output_filename)
                try:
                    row_count, diff = process_registry_with_validation(
                        xml_content, registry_id, table_name, headers, conn
                    )
                    logger.info(
                        f"{table_name}: {row_count} rows "
                        f"({len(diff['added'])} added, "
                        f"{len(diff['modified'])} modified, "
                        f"{len(diff['deleted'])} deleted)"
                    )
                    total_registries += 1
                    total_rows += row_count
                except (ValidationError, ValueError) as e:
                    # A validation failure usually means IANA changed their
                    # data layout; abort rather than write partial data.
                    print_error(
                        f"Validation failed for {table_name}: {e}\n"
                        f"IANA data structure may have changed. "
                        f"Please open an issue at the project repository."
                    )
                    conn.rollback()
                    conn.close()
                    return 1

        conn.commit()
        logger.info(
            f"Successfully updated {total_registries} registries "
            f"({total_rows} total rows)"
        )
    except sqlite3.Error as e:
        print_error(f"Database error: {e}")
        conn.rollback()
        conn.close()
        return 1
    finally:
        # sqlite3 allows close() on an already-closed connection, so the
        # early-return paths above do not conflict with this cleanup.
        conn.close()
    return 0

View File

@@ -1,5 +1,5 @@
{
"proto/assignments/tls-parameters/tls-parameters.xml": [
"https://www.iana.org/assignments/tls-parameters/tls-parameters.xml": [
[
"tls-parameters-4",
"tls_cipher_suites.csv",
@@ -31,7 +31,7 @@
["Value", "Description", "DTLS", "Recommended", "RFC/Draft"]
]
],
"proto/assignments/ikev2-parameters/ikev2-parameters.xml": [
"https://www.iana.org/assignments/ikev2-parameters/ikev2-parameters.xml": [
[
"ikev2-parameters-5",
"ikev2_encryption_algorithms.csv",

View File

@@ -1,7 +1,7 @@
"""Compliance checking module for IANA and BSI standards."""
import sqlite3
from datetime import datetime, timezone
from datetime import UTC, datetime
from typing import Any
# Error messages
@@ -26,7 +26,7 @@ def check_compliance(db_path: str, scan_id: int) -> dict[str, Any]:
cursor = conn.cursor()
try:
timestamp = datetime.now(timezone.utc).isoformat()
timestamp = datetime.now(UTC).isoformat()
stats = {
"cipher_suites_checked": 0,
"cipher_suites_passed": 0,
@@ -122,7 +122,7 @@ def check_certificate_compliance(
if bsi_result and algo_type:
min_key_length, valid_until, notes = bsi_result
current_year = datetime.now(timezone.utc).year
current_year = datetime.now(UTC).year
# Check key length
if key_bits and key_bits >= min_key_length:
@@ -285,7 +285,7 @@ def _check_cipher_suite_compliance(
# BSI check (sole compliance criterion)
if bsi_approved:
current_year = datetime.now(timezone.utc).year
current_year = datetime.now(UTC).year
if bsi_valid_until and bsi_valid_until >= current_year:
details.append(f"BSI: Approved until {bsi_valid_until}")
passed = True
@@ -408,7 +408,7 @@ def _check_supported_group_compliance(
# BSI check (sole compliance criterion)
if bsi_approved:
current_year = datetime.now(timezone.utc).year
current_year = datetime.now(UTC).year
if bsi_valid_until and bsi_valid_until >= current_year:
details.append(f"BSI: Approved until {bsi_valid_until}")
passed = True

View File

@@ -1,13 +1,11 @@
#!/usr/bin/env python3
"""IANA XML Registry to SQLite Converter
"""IANA XML parser utilities.
Parses IANA XML registry files and exports specified registries directly to SQLite database
based on configuration from iana_parse.json.
Provides functions for parsing IANA XML registry files and extracting
registry data. Used by update_iana command and tests.
"""
"""Script to fetch and parse IANA TLS registries into SQLite database."""
import json
import re
import sqlite3
import xml.etree.ElementTree as ET
from pathlib import Path
@@ -213,6 +211,45 @@ def get_table_name_from_filename(filename: str) -> str:
return table_name
def extract_updated_date(xml_content: str) -> str:
    """Extract date from <updated> tag in XML content.

    Only the first ten lines are inspected, since IANA places the tag in
    the document header.

    Args:
        xml_content: XML content as string

    Returns:
        Date string in format YYYY-MM-DD or "unknown"
    """
    header_lines = xml_content.split("\n")[:10]
    for candidate in header_lines:
        if "<updated>" not in candidate:
            continue
        found = re.search(r"<updated>([\d-]+)</updated>", candidate)
        if found:
            return found.group(1)
    return "unknown"
def is_unassigned(record: ET.Element, ns: dict | None) -> bool:
    """Check if record is an unassigned or reserved range entry.

    Range notation in the value field (e.g. "42-255", "0x0000-0x0200")
    marks blocks that IANA has not assigned to a concrete algorithm, so
    such records carry no usable data.

    Args:
        record: XML record element
        ns: Namespace dictionary or None

    Returns:
        True if record should be skipped (unassigned/reserved ranges)
    """
    value = get_element_text(record, "value", ns)
    return re.search(r"\d+-\d+", value) is not None
def write_registry_to_db(
root: ET.Element,
registry_id: str,
@@ -248,9 +285,11 @@ def write_registry_to_db(
else:
records = registry.findall("record")
# Prepare data
# Prepare data (skip unassigned entries)
rows = []
for record in records:
if is_unassigned(record, ns):
continue
row = []
for header in headers:
value = extract_field_value(record, header, ns)
@@ -279,7 +318,7 @@ def process_xml_file(
xml_path: str,
registries: list[tuple[str, str, list[str]]],
db_conn: sqlite3.Connection,
repo_root: str,
repo_root: Path,
) -> int:
"""Process single XML file and export all specified registries to database.
@@ -331,71 +370,3 @@ def process_xml_file(
) from e
return total_rows
def main() -> None:
    """Main entry point.

    Standalone converter: reads iana_parse.json and imports the configured
    IANA registries into the crypto_standards.db SQLite database. All
    user-facing messages are intentionally in German.

    NOTE(review): this function uses ``sys``, which does not appear in the
    imports visible in this module -- confirm it is imported at file top.
    """
    # Determine paths relative to this script's location in the package.
    script_dir = Path(__file__).parent
    repo_root = script_dir.parent.parent
    config_path = script_dir / "data" / "iana_parse.json"
    db_path = script_dir / "data" / "crypto_standards.db"

    print("IANA XML zu SQLite Konverter")
    print("=" * 50)
    print(f"Repository Root: {repo_root}")
    print(f"Konfiguration: {config_path}")
    print(f"Datenbank: {db_path}")

    # Check if database exists -- the schema must be created beforehand.
    if not db_path.exists():
        print(f"\n✗ Fehler: Datenbank {db_path} nicht gefunden", file=sys.stderr)
        print("Bitte zuerst die Datenbank erstellen.", file=sys.stderr)
        sys.exit(1)

    # Load configuration mapping XML files to registry/table definitions.
    try:
        config = load_config(config_path)
    except (FileNotFoundError, json.JSONDecodeError, OSError) as e:
        print(f"\nFehler beim Laden der Konfiguration: {e}", file=sys.stderr)
        sys.exit(1)

    print(f"\n{len(config)} XML-Datei(en) gefunden in Konfiguration")

    # Connect to database
    try:
        db_conn = sqlite3.connect(str(db_path))
    except sqlite3.Error as e:
        print(f"\n✗ Fehler beim Öffnen der Datenbank: {e}", file=sys.stderr)
        sys.exit(1)

    # Process each XML file; abort on the first failure.
    try:
        success_count = 0
        total_count = 0
        total_rows = 0
        for xml_path, registries in config.items():
            total_count += len(registries)
            try:
                rows = process_xml_file(xml_path, registries, db_conn, str(repo_root))
                success_count += len(registries)
                total_rows += rows
            except (RuntimeError, ValueError, sqlite3.Error) as e:
                print(f"\nFehler: {e}", file=sys.stderr)
                db_conn.close()
                sys.exit(1)

        # Summary
        print("\n" + "=" * 50)
        print(
            f"Erfolgreich abgeschlossen: {success_count}/{total_count} Registries "
            f"({total_rows} Einträge) in Datenbank importiert",
        )
    finally:
        db_conn.close()


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,227 @@
"""Validation functions for IANA registry data."""
import sqlite3
class ValidationError(Exception):
    """Raised when IANA data validation fails.

    Signals that fetched registry data does not match the expected
    structure: schema mismatch, missing fields, bad value formats, or
    suspiciously few rows.
    """

    pass
def normalize_header(header: str) -> str:
    """Normalize header name to database column format.

    Args:
        header: Header name from JSON config

    Returns:
        Normalized column name (lowercase, / replaced with _)
    """
    lowered = header.lower()
    return lowered.replace("/", "_")
def validate_headers(
    table_name: str,
    headers: list[str],
    db_conn: sqlite3.Connection,
) -> None:
    """Validate that headers match database schema.

    Args:
        table_name: Database table name
        headers: Headers from JSON config
        db_conn: Database connection

    Raises:
        ValidationError: If headers don't match schema
    """
    cursor = db_conn.cursor()
    # PRAGMA table_info rows are (cid, name, type, ...); column name is [1].
    cursor.execute(f"PRAGMA table_info({table_name})")
    schema_columns = [column_info[1] for column_info in cursor.fetchall()]
    normalized = [normalize_header(header) for header in headers]

    if len(normalized) != len(schema_columns):
        raise ValidationError(
            f"Column count mismatch for {table_name}: "
            f"expected {len(schema_columns)}, got {len(normalized)}"
        )

    for position, expected in enumerate(schema_columns):
        actual = normalized[position]
        if expected == actual:
            continue
        raise ValidationError(
            f"Column {position} mismatch for {table_name}: "
            f"expected '{expected}', got '{actual}' "
            f"(from header '{headers[position]}')"
        )
def validate_cipher_suite_row(row: dict[str, str]) -> None:
    """Validate single cipher suite record.

    Args:
        row: Dictionary with column names as keys

    Raises:
        ValidationError: If data is invalid
    """
    for required in ("value", "description"):
        # Missing key and empty string both count as absent.
        if not row.get(required):
            raise ValidationError(f"Missing required field: {required}")

    suite_value = row["value"]
    if not suite_value.startswith("0x"):
        raise ValidationError(f"Invalid value format: {suite_value}")

    recommended = row.get("recommended", "")
    if recommended and recommended not in ("Y", "N", "D"):
        raise ValidationError(f"Invalid Recommended value: {recommended}")
def validate_supported_groups_row(row: dict[str, str]) -> None:
    """Validate single supported groups record.

    Args:
        row: Dictionary with column names as keys

    Raises:
        ValidationError: If data is invalid
    """
    for required in ("value", "description"):
        if not row.get(required):
            raise ValidationError(f"Missing required field: {required}")

    # Supported-group code points are plain integers.
    try:
        int(row["value"])
    except ValueError as exc:
        raise ValidationError(f"Value must be numeric: {row['value']}") from exc

    recommended = row.get("recommended", "")
    if recommended and recommended not in ("Y", "N", "D"):
        raise ValidationError(f"Invalid Recommended value: {recommended}")
def validate_signature_schemes_row(row: dict[str, str]) -> None:
    """Validate single signature schemes record.

    Args:
        row: Dictionary with column names as keys

    Raises:
        ValidationError: If data is invalid
    """
    for required in ("value", "description"):
        if not row.get(required):
            raise ValidationError(f"Missing required field: {required}")

    # Signature scheme code points are hex-formatted ("0x...").
    scheme_value = row["value"]
    if not scheme_value.startswith("0x"):
        raise ValidationError(f"Invalid value format: {scheme_value}")
def validate_ikev2_row(row: dict[str, str]) -> None:
    """Validate IKEv2 record (encryption, DH groups, auth methods).

    Args:
        row: Dictionary with column names as keys

    Raises:
        ValidationError: If data is invalid
    """
    for required in ("value", "description"):
        if not row.get(required):
            raise ValidationError(f"Missing required field: {required}")

    # All IKEv2 registries use plain integer transform IDs.
    try:
        int(row["value"])
    except ValueError as exc:
        raise ValidationError(f"Value must be numeric: {row['value']}") from exc
# Per-table row validators. Tables without an entry here only get the
# minimum-row-count check in validate_registry_data.
VALIDATORS = {
    "iana_tls_cipher_suites": validate_cipher_suite_row,
    "iana_tls_supported_groups": validate_supported_groups_row,
    "iana_tls_signature_schemes": validate_signature_schemes_row,
    "iana_ikev2_encryption_algorithms": validate_ikev2_row,
    "iana_ikev2_dh_groups": validate_ikev2_row,
    "iana_ikev2_authentication_methods": validate_ikev2_row,
    "iana_ikev2_prf_algorithms": validate_ikev2_row,
    "iana_ikev2_integrity_algorithms": validate_ikev2_row,
}
# Sanity thresholds: a fetched registry with fewer rows than this most
# likely means the IANA page layout changed or the download was truncated.
MIN_ROWS = {
    "iana_tls_cipher_suites": 50,
    "iana_tls_signature_schemes": 10,
    "iana_tls_supported_groups": 10,
    "iana_tls_alerts": 10,
    "iana_tls_content_types": 5,
    "iana_ikev2_encryption_algorithms": 10,
    "iana_ikev2_prf_algorithms": 5,
    "iana_ikev2_integrity_algorithms": 5,
    "iana_ikev2_dh_groups": 10,
    "iana_ikev2_authentication_methods": 5,
}

# Fallback threshold for tables without an explicit MIN_ROWS entry.
_DEFAULT_MIN_ROWS = 5


def get_min_rows(table_name: str) -> int:
    """Get minimum expected rows for table.

    Args:
        table_name: Database table name

    Returns:
        Minimum number of rows expected
    """
    return MIN_ROWS.get(table_name, _DEFAULT_MIN_ROWS)
def validate_registry_data(
    table_name: str,
    rows: list[dict[str, str]],
    skip_min_rows_check: bool = False,
) -> None:
    """Validate complete registry data before DB write.

    Args:
        table_name: Database table name
        rows: List of row dictionaries
        skip_min_rows_check: Skip minimum rows validation (for tests)

    Raises:
        ValidationError: If validation fails
    """
    if not skip_min_rows_check:
        threshold = get_min_rows(table_name)
        if len(rows) < threshold:
            raise ValidationError(
                f"Insufficient data for {table_name}: "
                f"{len(rows)} rows (expected >= {threshold})"
            )

    row_validator = VALIDATORS.get(table_name)
    if row_validator is None:
        # No per-row rules registered for this table.
        return

    for row_number, row in enumerate(rows, 1):
        try:
            row_validator(row)
        except ValidationError as exc:
            # Re-raise with the row position for easier debugging.
            raise ValidationError(f"Row {row_number} in {table_name}: {exc}") from exc

View File

@@ -164,9 +164,7 @@ def _print_vulnerabilities(scan_result: ServerScanResult) -> None:
heartbleed_result = heartbleed_attempt.result
if heartbleed_result:
status = (
"VERWUNDBAR ⚠️"
if heartbleed_result.is_vulnerable_to_heartbleed
else "OK ✓"
"VERWUNDBAR" if heartbleed_result.is_vulnerable_to_heartbleed else "OK"
)
print(f" • Heartbleed: {status}")
@@ -182,7 +180,7 @@ def _print_vulnerabilities(scan_result: ServerScanResult) -> None:
)
elif hasattr(robot_result, "robot_result"):
vulnerable = str(robot_result.robot_result) != "NOT_VULNERABLE_NO_ORACLE"
status = "VERWUNDBAR ⚠️" if vulnerable else "OK"
status = "VERWUNDBAR" if vulnerable else "OK"
print(f" • ROBOT: {status}")
# OpenSSL CCS Injection
@@ -190,9 +188,7 @@ def _print_vulnerabilities(scan_result: ServerScanResult) -> None:
if ccs_attempt.status == ScanCommandAttemptStatusEnum.COMPLETED:
ccs_result = ccs_attempt.result
if ccs_result:
status = (
"VERWUNDBAR ⚠️" if ccs_result.is_vulnerable_to_ccs_injection else "OK ✓"
)
status = "VERWUNDBAR" if ccs_result.is_vulnerable_to_ccs_injection else "OK"
print(f" • OpenSSL CCS Injection: {status}")

View File

@@ -1,95 +1,26 @@
"""CSV report generation with granular file structure for reST integration."""
import csv
import json
import sqlite3
from pathlib import Path
from typing import Any
from .query import get_scan_data
def _get_headers(db_path: str, export_type: str) -> list[str]:
    """Get CSV headers from database.

    Args:
        db_path: Path to database file
        export_type: Type of export (e.g. 'cipher_suites_accepted')

    Returns:
        List of column headers

    Raises:
        ValueError: If no headers are stored for *export_type*.
    """
    conn = sqlite3.connect(db_path)
    # try/finally so the connection is also closed when the query raises
    # (the previous version leaked it on error).
    try:
        cursor = conn.cursor()
        cursor.execute(
            "SELECT headers FROM csv_export_metadata WHERE export_type = ?",
            (export_type,),
        )
        row = cursor.fetchone()
    finally:
        conn.close()
    if row:
        # Headers are stored as a JSON array string.
        return json.loads(row[0])
    raise ValueError(f"No headers found for export_type: {export_type}")
def _format_bool(
    value: bool | None,
    true_val: str = "Yes",
    false_val: str = "No",
    none_val: str = "-",
) -> str:
    """Format boolean value to string representation.

    Identity comparison is deliberate: anything that is not literally
    True or False (including None) maps to *none_val*.

    Args:
        value: Boolean value to format
        true_val: String representation for True
        false_val: String representation for False
        none_val: String representation for None

    Returns:
        Formatted string
    """
    for expected, text in ((True, true_val), (False, false_val)):
        if value is expected:
            return text
    return none_val
def _write_csv(filepath: Path, headers: list[str], rows: list[list[Any]]) -> None:
    """Write data to CSV file.

    Args:
        filepath: Path to CSV file
        headers: List of column headers
        rows: List of data rows
    """
    # newline="" is required so the csv module controls line endings.
    with filepath.open("w", newline="", encoding="utf-8") as handle:
        csv.writer(handle).writerows([headers, *rows])
from .csv_utils import CSVExporter, format_bool
from .query import get_scan_data, has_tls_support
def _export_summary(
output_dir: Path,
exporter: CSVExporter,
summary: dict[str, Any],
db_path: str,
) -> list[str]:
"""Export summary statistics to CSV.
Args:
output_dir: Output directory path
exporter: CSVExporter instance
summary: Summary data dictionary
Returns:
List of generated file paths
"""
summary_file = output_dir / "summary.csv"
rows = [
["Scanned Ports", summary.get("total_ports", 0)],
["Ports with TLS Support", summary.get("successful_ports", 0)],
@@ -114,21 +45,19 @@ def _export_summary(
summary.get("critical_vulnerabilities", 0),
],
]
headers = _get_headers(db_path, "summary")
_write_csv(summary_file, headers, rows)
return [str(summary_file)]
filepath = exporter.write_csv("summary.csv", "summary", rows)
return [filepath]
def _export_cipher_suites(
output_dir: Path,
exporter: CSVExporter,
port: int,
cipher_suites: dict[str, dict[str, list]],
db_path: str,
) -> list[str]:
"""Export cipher suites to CSV files.
Args:
output_dir: Output directory path
exporter: CSVExporter instance
port: Port number
cipher_suites: Cipher suites data per TLS version
@@ -140,49 +69,46 @@ def _export_cipher_suites(
for tls_version, suites in cipher_suites.items():
if suites.get("accepted"):
filepath = output_dir / f"{port}_cipher_suites_{tls_version}_accepted.csv"
rows = [
[
suite["name"],
suite.get("iana_recommended", "-"),
_format_bool(suite.get("bsi_approved")),
format_bool(suite.get("bsi_approved")),
suite.get("bsi_valid_until", "-"),
_format_bool(suite.get("compliant")),
format_bool(suite.get("compliant")),
]
for suite in suites["accepted"]
]
headers = _get_headers(db_path, "cipher_suites_accepted")
_write_csv(filepath, headers, rows)
generated.append(str(filepath))
filename = f"{port}_cipher_suites_{tls_version}_accepted.csv"
filepath = exporter.write_csv(filename, "cipher_suites_accepted", rows)
generated.append(filepath)
if suites.get("rejected"):
filepath = output_dir / f"{port}_cipher_suites_{tls_version}_rejected.csv"
rows = [
[
suite["name"],
suite.get("iana_recommended", "-"),
_format_bool(suite.get("bsi_approved")),
format_bool(suite.get("bsi_approved")),
suite.get("bsi_valid_until", "-"),
]
for suite in suites["rejected"]
]
headers = _get_headers(db_path, "cipher_suites_rejected")
_write_csv(filepath, headers, rows)
generated.append(str(filepath))
filename = f"{port}_cipher_suites_{tls_version}_rejected.csv"
filepath = exporter.write_csv(filename, "cipher_suites_rejected", rows)
generated.append(filepath)
return generated
def _export_supported_groups(
output_dir: Path,
exporter: CSVExporter,
port: int,
groups: list[dict[str, Any]],
db_path: str,
) -> list[str]:
"""Export supported groups to CSV.
Args:
output_dir: Output directory path
exporter: CSVExporter instance
port: Port number
groups: List of supported groups
@@ -190,32 +116,30 @@ def _export_supported_groups(
List of generated file paths
"""
filepath = output_dir / f"{port}_supported_groups.csv"
rows = [
[
group["name"],
group.get("iana_recommended", "-"),
_format_bool(group.get("bsi_approved")),
format_bool(group.get("bsi_approved")),
group.get("bsi_valid_until", "-"),
_format_bool(group.get("compliant")),
format_bool(group.get("compliant")),
]
for group in groups
]
headers = _get_headers(db_path, "supported_groups")
_write_csv(filepath, headers, rows)
return [str(filepath)]
filename = f"{port}_supported_groups.csv"
filepath = exporter.write_csv(filename, "supported_groups", rows)
return [filepath]
def _export_missing_groups(
output_dir: Path,
exporter: CSVExporter,
port: int,
missing: dict[str, list[dict[str, Any]]],
db_path: str,
) -> list[str]:
"""Export missing recommended groups to CSV.
Args:
output_dir: Output directory path
exporter: CSVExporter instance
port: Port number
missing: Dictionary with bsi_approved and iana_recommended groups
@@ -226,7 +150,6 @@ def _export_missing_groups(
generated = []
if missing.get("bsi_approved"):
filepath = output_dir / f"{port}_missing_groups_bsi.csv"
rows = [
[
group["name"],
@@ -235,33 +158,31 @@ def _export_missing_groups(
]
for group in missing["bsi_approved"]
]
headers = _get_headers(db_path, "missing_groups_bsi")
_write_csv(filepath, headers, rows)
generated.append(str(filepath))
filename = f"{port}_missing_groups_bsi.csv"
filepath = exporter.write_csv(filename, "missing_groups_bsi", rows)
generated.append(filepath)
if missing.get("iana_recommended"):
filepath = output_dir / f"{port}_missing_groups_iana.csv"
rows = [
[group["name"], group.get("iana_value", "-")]
for group in missing["iana_recommended"]
]
headers = _get_headers(db_path, "missing_groups_iana")
_write_csv(filepath, headers, rows)
generated.append(str(filepath))
filename = f"{port}_missing_groups_iana.csv"
filepath = exporter.write_csv(filename, "missing_groups_iana", rows)
generated.append(filepath)
return generated
def _export_certificates(
output_dir: Path,
exporter: CSVExporter,
port: int,
certificates: list[dict[str, Any]],
db_path: str,
) -> list[str]:
"""Export certificates to CSV.
Args:
output_dir: Output directory path
exporter: CSVExporter instance
port: Port number
certificates: List of certificate data
@@ -269,7 +190,6 @@ def _export_certificates(
List of generated file paths
"""
filepath = output_dir / f"{port}_certificates.csv"
rows = [
[
cert["position"],
@@ -279,25 +199,24 @@ def _export_certificates(
cert["not_after"],
cert["key_type"],
cert["key_bits"],
_format_bool(cert.get("compliant")),
format_bool(cert.get("compliant")),
]
for cert in certificates
]
headers = _get_headers(db_path, "certificates")
_write_csv(filepath, headers, rows)
return [str(filepath)]
filename = f"{port}_certificates.csv"
filepath = exporter.write_csv(filename, "certificates", rows)
return [filepath]
def _export_vulnerabilities(
output_dir: Path,
exporter: CSVExporter,
port: int,
vulnerabilities: list[dict[str, Any]],
db_path: str,
) -> list[str]:
"""Export vulnerabilities to CSV.
Args:
output_dir: Output directory path
exporter: CSVExporter instance
port: Port number
vulnerabilities: List of vulnerability data
@@ -305,30 +224,28 @@ def _export_vulnerabilities(
List of generated file paths
"""
filepath = output_dir / f"{port}_vulnerabilities.csv"
rows = [
[
vuln["type"],
_format_bool(vuln["vulnerable"]),
format_bool(vuln["vulnerable"]),
vuln.get("details", "-"),
]
for vuln in vulnerabilities
]
headers = _get_headers(db_path, "vulnerabilities")
_write_csv(filepath, headers, rows)
return [str(filepath)]
filename = f"{port}_vulnerabilities.csv"
filepath = exporter.write_csv(filename, "vulnerabilities", rows)
return [filepath]
def _export_protocol_features(
output_dir: Path,
exporter: CSVExporter,
port: int,
features: list[dict[str, Any]],
db_path: str,
) -> list[str]:
"""Export protocol features to CSV.
Args:
output_dir: Output directory path
exporter: CSVExporter instance
port: Port number
features: List of protocol feature data
@@ -336,30 +253,28 @@ def _export_protocol_features(
List of generated file paths
"""
filepath = output_dir / f"{port}_protocol_features.csv"
rows = [
[
feature["name"],
_format_bool(feature["supported"]),
format_bool(feature["supported"]),
feature.get("details", "-"),
]
for feature in features
]
headers = _get_headers(db_path, "protocol_features")
_write_csv(filepath, headers, rows)
return [str(filepath)]
filename = f"{port}_protocol_features.csv"
filepath = exporter.write_csv(filename, "protocol_features", rows)
return [filepath]
def _export_session_features(
output_dir: Path,
exporter: CSVExporter,
port: int,
features: list[dict[str, Any]],
db_path: str,
) -> list[str]:
"""Export session features to CSV.
Args:
output_dir: Output directory path
exporter: CSVExporter instance
port: Port number
features: List of session feature data
@@ -367,33 +282,31 @@ def _export_session_features(
List of generated file paths
"""
filepath = output_dir / f"{port}_session_features.csv"
rows = [
[
feature["type"],
_format_bool(feature.get("client_initiated")),
_format_bool(feature.get("secure")),
_format_bool(feature.get("session_id_supported")),
_format_bool(feature.get("ticket_supported")),
format_bool(feature.get("client_initiated")),
format_bool(feature.get("secure")),
format_bool(feature.get("session_id_supported")),
format_bool(feature.get("ticket_supported")),
feature.get("details", "-"),
]
for feature in features
]
headers = _get_headers(db_path, "session_features")
_write_csv(filepath, headers, rows)
return [str(filepath)]
filename = f"{port}_session_features.csv"
filepath = exporter.write_csv(filename, "session_features", rows)
return [filepath]
def _export_http_headers(
output_dir: Path,
exporter: CSVExporter,
port: int,
headers: list[dict[str, Any]],
db_path: str,
) -> list[str]:
"""Export HTTP headers to CSV.
Args:
output_dir: Output directory path
exporter: CSVExporter instance
port: Port number
headers: List of HTTP header data
@@ -401,30 +314,28 @@ def _export_http_headers(
List of generated file paths
"""
filepath = output_dir / f"{port}_http_headers.csv"
rows = [
[
header["name"],
_format_bool(header["is_present"]),
format_bool(header["is_present"]),
header.get("value", "-"),
]
for header in headers
]
csv_headers = _get_headers(db_path, "http_headers")
_write_csv(filepath, csv_headers, rows)
return [str(filepath)]
filename = f"{port}_http_headers.csv"
filepath = exporter.write_csv(filename, "http_headers", rows)
return [filepath]
def _export_compliance_status(
output_dir: Path,
exporter: CSVExporter,
port: int,
compliance: dict[str, Any],
db_path: str,
) -> list[str]:
"""Export compliance status to CSV.
Args:
output_dir: Output directory path
exporter: CSVExporter instance
port: Port number
compliance: Compliance data dictionary
@@ -432,7 +343,6 @@ def _export_compliance_status(
List of generated file paths
"""
filepath = output_dir / f"{port}_compliance_status.csv"
rows = []
if "cipher_suites_checked" in compliance:
@@ -456,32 +366,13 @@ def _export_compliance_status(
)
if rows:
headers = _get_headers(db_path, "compliance_status")
_write_csv(filepath, headers, rows)
return [str(filepath)]
filename = f"{port}_compliance_status.csv"
filepath = exporter.write_csv(filename, "compliance_status", rows)
return [filepath]
return []
def _has_tls_support(port_data: dict[str, Any]) -> bool:
"""Check if port has TLS support.
Args:
port_data: Port data dictionary
Returns:
True if port has TLS support
"""
return bool(
port_data.get("cipher_suites")
or port_data.get("supported_groups")
or port_data.get("certificates")
or port_data.get("tls_version"),
)
# Export handlers mapping: (data_key, handler_function)
EXPORT_HANDLERS = (
("cipher_suites", _export_cipher_suites),
("supported_groups", _export_supported_groups),
@@ -515,22 +406,19 @@ def generate_csv_reports(
output_dir_path = Path(output_dir)
output_dir_path.mkdir(parents=True, exist_ok=True)
exporter = CSVExporter(db_path, output_dir_path)
generated_files = []
generated_files.extend(
_export_summary(output_dir_path, data.get("summary", {}), db_path),
)
generated_files.extend(_export_summary(exporter, data.get("summary", {})))
for port_data in data["ports_data"].values():
if not _has_tls_support(port_data):
if not has_tls_support(port_data):
continue
port = port_data["port"]
for data_key, handler_func in EXPORT_HANDLERS:
if port_data.get(data_key):
generated_files.extend(
handler_func(output_dir_path, port, port_data[data_key], db_path),
)
generated_files.extend(handler_func(exporter, port, port_data[data_key]))
return generated_files

View File

@@ -0,0 +1,102 @@
"""Utilities for CSV export with header caching and path management."""
import csv
import json
import sqlite3
from pathlib import Path
from typing import Any
class CSVExporter:
    """CSV export helper with header caching and path management."""

    def __init__(self, db_path: str, output_dir: Path):
        """Initialize CSV exporter.

        Args:
            db_path: Path to database file
            output_dir: Output directory for CSV files
        """
        self.db_path = db_path
        self.output_dir = output_dir
        # Per-instance cache: each export type hits the database at most once.
        self._headers_cache: dict[str, list[str]] = {}

    def get_headers(self, export_type: str) -> list[str]:
        """Get CSV headers from database with caching.

        Args:
            export_type: Type of export (e.g. 'cipher_suites_accepted')

        Returns:
            List of column headers

        Raises:
            ValueError: If no header metadata exists for export_type.
        """
        if export_type not in self._headers_cache:
            conn = sqlite3.connect(self.db_path)
            try:
                cursor = conn.cursor()
                cursor.execute(
                    "SELECT headers FROM csv_export_metadata WHERE export_type = ?",
                    (export_type,),
                )
                row = cursor.fetchone()
            finally:
                # Always release the connection -- the original closed it only
                # on the success path and leaked the handle if execute() raised.
                conn.close()
            if not row:
                raise ValueError(f"No headers found for export_type: {export_type}")
            # Headers are stored as a JSON array in the metadata table.
            self._headers_cache[export_type] = json.loads(row[0])
        return self._headers_cache[export_type]

    def write_csv(
        self,
        filename: str,
        export_type: str,
        rows: list[list[Any]],
    ) -> str:
        """Write data to CSV file with headers from metadata.

        Args:
            filename: CSV filename
            export_type: Type of export for header lookup
            rows: List of data rows

        Returns:
            String path to created file

        Raises:
            ValueError: Propagated from get_headers() for unknown export types.
        """
        filepath = self.output_dir / filename
        headers = self.get_headers(export_type)
        # newline="" lets the csv module control line endings (csv docs).
        with filepath.open("w", newline="", encoding="utf-8") as f:
            writer = csv.writer(f)
            writer.writerow(headers)
            writer.writerows(rows)
        return str(filepath)
def format_bool(
value: bool | None,
true_val: str = "Yes",
false_val: str = "No",
none_val: str = "-",
) -> str:
"""Format boolean value to string representation.
Args:
value: Boolean value to format
true_val: String representation for True
false_val: String representation for False
none_val: String representation for None
Returns:
Formatted string
"""
if value is True:
return true_val
if value is False:
return false_val
return none_val

View File

@@ -7,6 +7,24 @@ from typing import Any
COMPLIANCE_WARNING_THRESHOLD = 50.0
def has_tls_support(port_data: dict[str, Any]) -> bool:
    """Check if port has TLS support based on data presence.

    Args:
        port_data: Port data dictionary

    Returns:
        True if port has TLS support
    """
    # Any truthy TLS artifact (ciphers, groups, certs, or a version string)
    # marks the port as TLS-capable.
    indicators = ("cipher_suites", "supported_groups", "certificates", "tls_version")
    return any(port_data.get(name) for name in indicators)
def list_scans(db_path: str) -> list[dict[str, Any]]:
"""List all available scans in the database.

View File

@@ -1,11 +1,13 @@
"""Shared utilities for report template rendering."""
from datetime import datetime, timezone
from datetime import UTC, datetime
from pathlib import Path
from typing import Any
from jinja2 import Environment, FileSystemLoader, select_autoescape
from .query import has_tls_support
def format_tls_version(version: str) -> str:
"""Format TLS version string for display.
@@ -59,7 +61,7 @@ def generate_report_id(metadata: dict[str, Any]) -> str:
dt = datetime.fromisoformat(metadata["timestamp"])
date_str = dt.strftime("%Y%m%d")
except (ValueError, KeyError):
date_str = datetime.now(timezone.utc).strftime("%Y%m%d")
date_str = datetime.now(UTC).strftime("%Y%m%d")
return f"{date_str}_{metadata['scan_id']}"
@@ -95,13 +97,7 @@ def build_template_context(data: dict[str, Any]) -> dict[str, Any]:
# Filter ports with TLS support for port sections
ports_with_tls = []
for port_data in data["ports_data"].values():
has_tls = (
port_data.get("cipher_suites")
or port_data.get("supported_groups")
or port_data.get("certificates")
or port_data.get("tls_version")
)
if has_tls:
if has_tls_support(port_data):
ports_with_tls.append(port_data)
return {

View File

@@ -1,11 +1,9 @@
"""Module for performing SSL/TLS scans with SSLyze."""
import logging
from datetime import datetime, timezone
from datetime import UTC, datetime
from typing import Any
logger = logging.getLogger(__name__)
from sslyze import (
ProtocolWithOpportunisticTlsEnum,
Scanner,
@@ -19,6 +17,8 @@ from sslyze import (
from .protocol_loader import get_protocol_for_port
logger = logging.getLogger(__name__)
def create_scan_request(
hostname: str,
@@ -194,7 +194,7 @@ def perform_scan(
continue
# Calculate scan duration
scan_end_time = datetime.now(timezone.utc)
scan_end_time = datetime.now(UTC)
scan_duration = (scan_end_time - scan_start_time).total_seconds()
# Return first result (we only scan one host)