From bc1b4279decb529913a19a602fca0f850792c563 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateusz=20Gruszczy=C5=84ski?= Date: Fri, 13 Feb 2026 12:42:53 +0100 Subject: [PATCH] first commit --- .dockerignore | 20 ++ .env.example | 257 ++++++++++++++ .gitignore | 5 + Dockerfile | 29 ++ api.py | 252 ++++++++++++++ app.py | 303 +++++++++++++++++ config.py | 478 ++++++++++++++++++++++++++ cve_handler.py | 788 +++++++++++++++++++++++++++++++++++++++++++ discord_bot.py | 218 ++++++++++++ docker-compose.yml | 87 +++++ docker-entrypoint.sh | 61 ++++ env.example | 142 ++++++++ full_scan.py | 75 ++++ requirements.txt | 8 + scheduler.py | 40 +++ static/css/style.css | 264 +++++++++++++++ static/js/app.js | 425 +++++++++++++++++++++++ templates/404.html | 27 ++ templates/500.html | 27 ++ templates/base.html | 89 +++++ templates/index.html | 240 +++++++++++++ 21 files changed, 3835 insertions(+) create mode 100644 .dockerignore create mode 100644 .env.example create mode 100644 .gitignore create mode 100644 Dockerfile create mode 100644 api.py create mode 100644 app.py create mode 100644 config.py create mode 100644 cve_handler.py create mode 100644 discord_bot.py create mode 100644 docker-compose.yml create mode 100644 docker-entrypoint.sh create mode 100644 env.example create mode 100644 full_scan.py create mode 100644 requirements.txt create mode 100644 scheduler.py create mode 100644 static/css/style.css create mode 100644 static/js/app.js create mode 100644 templates/404.html create mode 100644 templates/500.html create mode 100644 templates/base.html create mode 100644 templates/index.html diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..a7d3ebc --- /dev/null +++ b/.dockerignore @@ -0,0 +1,20 @@ +__pycache__/ +*.pyc +*.pyo +*.pyd +.Python +*.so +*.egg +*.egg-info/ +dist/ +build/ +.git/ +.gitignore +README.md +.env +*.db +*.db-journal +*.db-wal +*.log +.vscode/ +.idea/ diff --git a/.env.example b/.env.example new file mode 100644 index 
0000000..12a085a --- /dev/null +++ b/.env.example @@ -0,0 +1,257 @@ +# ============================================================ +# CVE MONITOR - CONFIGURATION FILE +# ============================================================ +# Copy this file to .env and adjust values for your environment + + +# ============================================================ +# APPLICATION SETTINGS +# ============================================================ +APP_NAME=CVE Monitor +APP_VERSION=1.0.0 +DEBUG=False +HOST=0.0.0.0 +PORT=5000 + + +# ============================================================ +# DATABASE CONFIGURATION +# ============================================================ +DATABASE_PATH=./cve_db/cve_cache.db +DATABASE_WAL_MODE=True +DATABASE_CACHE_SIZE=10000 + + +# ============================================================ +# LOGGING CONFIGURATION +# ============================================================ +LOG_LEVEL=INFO +LOG_FORMAT=%(asctime)s - %(name)s - %(levelname)s - %(message)s +LOG_FILE=./logs/cve_monitor.log +LOG_MAX_BYTES=10485760 +LOG_BACKUP_COUNT=5 + + +# ============================================================ +# AUTO-UPDATE CONFIGURATION +# ============================================================ +# Enable automatic CVE updates in background +ENABLE_AUTO_UPDATE=True + +# How often to check for new CVEs (in hours) +UPDATE_INTERVAL_HOURS=1 + +# How many days back to fetch CVEs on first run +INITIAL_LOOKBACK_DAYS=365 + +# Cache duration in hours before considering data stale +CACHE_HOURS=24 + + +# ============================================================ +# EXTERNAL API KEYS (Optional but Recommended) +# ============================================================ +# NVD API Key - Get yours at: https://nvd.nist.gov/developers/request-an-api-key +# Without API key: 5 requests per 30 seconds +# With API key: 50 requests per 30 seconds +NVD_API_KEY= + +# GitHub Personal Access Token - Get yours at: 
https://github.com/settings/tokens +# Increases rate limit from 60 to 5000 requests per hour +GITHUB_TOKEN= + + +# ============================================================ +# API ENDPOINTS (Advanced - Don't change unless necessary) +# ============================================================ +NVD_API_URL=https://services.nvd.nist.gov/rest/json/cves/2.0 +GITHUB_ADVISORIES_URL=https://api.github.com/advisories +NVD_TIMEOUT=30 +GITHUB_TIMEOUT=15 + + +# ============================================================ +# GUNICORN CONFIGURATION (Production) +# ============================================================ +WORKERS=4 +WORKER_TIMEOUT=120 +WORKER_CLASS=sync +MAX_REQUESTS=1000 +MAX_REQUESTS_JITTER=50 + + +# ============================================================ +# SECURITY SETTINGS +# ============================================================ +# Enable/disable security headers +ENABLE_SECURITY_HEADERS=True + +# Enable rate limiting to prevent abuse +ENABLE_RATE_LIMITING=True + +# Rate limit: requests per minute per IP +RATE_LIMIT_PER_MINUTE=60 + +# Enable gzip compression +ENABLE_COMPRESSION=True + +# Enable ETag for caching +ENABLE_ETAG=True + +# Content Security Policy +CSP_DEFAULT_SRC='self' +CSP_SCRIPT_SRC='self' 'unsafe-inline' cdn.jsdelivr.net cdnjs.cloudflare.com +CSP_STYLE_SRC='self' 'unsafe-inline' cdn.jsdelivr.net cdnjs.cloudflare.com +CSP_FONT_SRC='self' cdnjs.cloudflare.com +CSP_IMG_SRC='self' data: +CSP_CONNECT_SRC='self' cdn.jsdelivr.net + +# X-Frame-Options: DENY, SAMEORIGIN, or ALLOW-FROM uri +X_FRAME_OPTIONS=DENY + +# HSTS max age in seconds (1 year = 31536000) +HSTS_MAX_AGE=31536000 + + +# ============================================================ +# FEATURE FLAGS +# ============================================================ +# Enable/disable specific features +ENABLE_CHARTS=True +ENABLE_SEARCH=True +ENABLE_EXPORT=True +ENABLE_DARK_MODE=True +ENABLE_PAGINATION=True + + +# 
============================================================ +# UI CONFIGURATION +# ============================================================ +# Items per page in CVE list +ITEMS_PER_PAGE=50 + +# Maximum search results to display +MAX_SEARCH_RESULTS=50 + + +# ============================================================ +# CDN URLS (for offline use, download and host locally) +# ============================================================ +BOOTSTRAP_CSS_CDN=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css +BOOTSTRAP_JS_CDN=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js +FONTAWESOME_CDN=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.1/css/all.min.css +CHARTJS_CDN=https://cdn.jsdelivr.net/npm/chart.js@4.4.1/dist/chart.umd.js + + +# ============================================================ +# DOCKER SPECIFIC SETTINGS +# ============================================================ +# Set to True when running inside Docker +IS_DOCKER=False + +# Timezone for Docker container +TZ=Europe/Warsaw + + +# ============================================================ +# MONITORED VENDORS +# ============================================================ +# Comma-separated list of vendor codes to monitor +# Available: microsoft,apple,fortinet,cisco,adobe,oracle,google,linux,vmware,paloalto,docker,kubernetes +MONITORED_VENDORS=microsoft,apple,cisco,fortinet,oracle,google,linux + + +# ============================================================ +# NOTIFICATION SETTINGS (Future Feature) +# ============================================================ +# Discord webhook for critical CVE notifications +DISCORD_WEBHOOK_URL= + +# Slack webhook for notifications +SLACK_WEBHOOK_URL= + +# Email notifications +SMTP_SERVER= +SMTP_PORT=587 +SMTP_USERNAME= +SMTP_PASSWORD= +SMTP_FROM=cve-monitor@example.com +SMTP_TO=admin@example.com + +# Notification thresholds +NOTIFY_ON_CRITICAL=True +NOTIFY_ON_HIGH=False +NOTIFY_MIN_CVSS=9.0 + + 
+# ============================================================ +# BACKUP CONFIGURATION +# ============================================================ +# Enable automatic database backups +ENABLE_BACKUPS=True + +# Backup directory +BACKUP_DIR=./backups + +# How many backup files to keep +BACKUP_RETENTION_DAYS=30 + +# Backup interval in hours +BACKUP_INTERVAL_HOURS=24 + + +# ============================================================ +# PROMETHEUS METRICS (Future Feature) +# ============================================================ +ENABLE_METRICS=False +METRICS_PORT=9090 + + +# ============================================================ +# DEVELOPMENT SETTINGS +# ============================================================ +# Enable Flask debug toolbar (development only) +FLASK_DEBUG_TOOLBAR=False + +# Enable SQL query logging +SQL_DEBUG=False + +# Disable security features for local development +# WARNING: Never use in production! +DEV_MODE=False + + +# ============================================================ +# HEALTH CHECK +# ============================================================ +# Health check endpoint timeout +HEALTH_CHECK_TIMEOUT=5 + + +# ============================================================ +# CORS SETTINGS (if using as API backend) +# ============================================================ +ENABLE_CORS=False +CORS_ORIGINS=* + + +# Discord Bot Configuration +ENABLE_DISCORD_BOT=True +DISCORD_BOT_TOKEN=YOUR_BOT_TOKEN_HERE +DISCORD_CHANNEL_ID=1234567890123456789 +DISCORD_CHECK_INTERVAL_MINUTES=60 +DISCORD_NOTIFY_CRITICAL=True +DISCORD_NOTIFY_HIGH=True +DISCORD_MIN_CVSS=7.0 + + +# ============================================================ +# NOTES +# ============================================================ +# 1. Boolean values: True/False (case-sensitive) +# 2. Empty values will use defaults from config.py +# 3. Paths can be absolute or relative to project root +# 4. For production, always set DEBUG=False +# 5. 
Get NVD API key to avoid rate limits +# 6. Use strong CSP in production +# 7. Enable HTTPS in production (handled by reverse proxy) diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d7e4398 --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +__pycache__ +logs/ +venv +.env +cve_db diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..d99d493 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,29 @@ +FROM python:3.14-alpine + +WORKDIR /app +RUN apk add --no-cache \ + gcc \ + musl-dev \ + libffi-dev \ + openssl-dev \ + sqlite \ + curl + +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +RUN mkdir -p /app/cve_db /app/logs && \ + chmod -R 777 /app/cve_db /app/logs + +COPY docker-entrypoint.sh /docker-entrypoint.sh +RUN chmod +x /docker-entrypoint.sh + +EXPOSE 5000 + +HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \ + CMD curl -f http://localhost:5000/health || exit 1 + +ENTRYPOINT ["/docker-entrypoint.sh"] +CMD ["app"] diff --git a/api.py b/api.py new file mode 100644 index 0000000..f767e64 --- /dev/null +++ b/api.py @@ -0,0 +1,252 @@ +from flask import Blueprint, jsonify, request, make_response +import logging +import csv +from io import StringIO +from datetime import datetime +import config +from cve_handler import CVEHandler + +logger = logging.getLogger(__name__) +api_bp = Blueprint('api', __name__) +cve_handler = CVEHandler() + +def api_response(data: any, status: int = 200) -> tuple: + return jsonify(data), status + +def api_error(message: str, status: int = 400) -> tuple: + return jsonify({ + 'error': message, + 'timestamp': datetime.utcnow().isoformat() + }), status + +@api_bp.route('/vendors', methods=['GET']) +def get_vendors(): + try: + summary = cve_handler.get_all_vendors_summary() + return api_response({ + 'vendors': summary, + 'total_vendors': len(summary), + 'timestamp': datetime.utcnow().isoformat() + }) + except Exception as e: + logger.error(f"Error in 
get_vendors: {e}", exc_info=True) + return api_error(str(e), 500) + +@api_bp.route('/cve/', methods=['GET']) +def get_vendor_cves(vendor_code: str): + try: + vendor = next((v for v in config.VENDORS if v['code'] == vendor_code), None) + if not vendor: + return api_error(f"Unknown vendor: {vendor_code}", 404) + + try: + limit = min( + int(request.args.get('limit', config.ITEMS_PER_PAGE)), + config.MAX_ITEMS_PER_PAGE + ) + offset = max(int(request.args.get('offset', 0)), 0) + except ValueError: + return api_error("Invalid pagination parameters", 400) + + cves = cve_handler.get_vendor_cves( + vendor_code, + limit=limit, + offset=offset + ) + + return api_response({ + 'vendor': { + 'code': vendor_code, + 'name': vendor['name'] + }, + 'cves': cves, + 'count': len(cves), + 'limit': limit, + 'offset': offset, + 'timestamp': datetime.utcnow().isoformat() + }) + + except Exception as e: + logger.error(f"Error in get_vendor_cves: {e}", exc_info=True) + return api_error(str(e), 500) + +@api_bp.route('/stats/', methods=['GET']) +def get_vendor_stats(vendor_code: str): + try: + vendor = next((v for v in config.VENDORS if v['code'] == vendor_code), None) + if not vendor: + return api_error(f"Unknown vendor: {vendor_code}", 404) + + stats = cve_handler.get_vendor_stats(vendor_code) + + return api_response({ + 'vendor': { + 'code': vendor_code, + 'name': vendor['name'] + }, + 'stats': stats, + 'timestamp': datetime.utcnow().isoformat() + }) + + except Exception as e: + logger.error(f"Error in get_vendor_stats: {e}", exc_info=True) + return api_error(str(e), 500) + +@api_bp.route('/cve//filter', methods=['GET']) +def filter_vendor_cves(vendor_code: str): + try: + vendor = next((v for v in config.VENDORS if v['code'] == vendor_code), None) + if not vendor: + return api_error(f"Unknown vendor: {vendor_code}", 404) + + severity = request.args.get('severity', '').upper() + year = request.args.get('year', type=int) + + try: + limit = min( + int(request.args.get('limit', 
config.ITEMS_PER_PAGE)), + config.MAX_ITEMS_PER_PAGE + ) + offset = max(int(request.args.get('offset', 0)), 0) + except ValueError: + return api_error("Invalid pagination parameters", 400) + + if severity and severity not in ['CRITICAL', 'HIGH', 'MEDIUM', 'LOW']: + return api_error(f"Invalid severity: {severity}", 400) + + cves = cve_handler.get_vendor_cves( + vendor_code, + limit=limit, + offset=offset, + severity=severity if severity else None, + year=year + ) + + return api_response({ + 'vendor': { + 'code': vendor_code, + 'name': vendor['name'] + }, + 'filters': { + 'severity': severity, + 'year': year + }, + 'cves': cves, + 'count': len(cves), + 'timestamp': datetime.utcnow().isoformat() + }) + + except Exception as e: + logger.error(f"Error in filter_vendor_cves: {e}", exc_info=True) + return api_error(str(e), 500) + +@api_bp.route('/search', methods=['GET']) +def search_cves(): + if not config.ENABLE_SEARCH: + return api_error("Search is disabled", 403) + + try: + query = request.args.get('q', '').strip() + if not query: + return api_error("Missing search query", 400) + + if len(query) < 3: + return api_error("Query too short (min 3 characters)", 400) + + limit = min( + int(request.args.get('limit', config.ITEMS_PER_PAGE)), + config.MAX_ITEMS_PER_PAGE + ) + + results = cve_handler.search_cves(query, limit=limit) + + return api_response({ + 'query': query, + 'results': results, + 'count': len(results), + 'timestamp': datetime.utcnow().isoformat() + }) + + except Exception as e: + logger.error(f"Error in search_cves: {e}", exc_info=True) + return api_error(str(e), 500) + +@api_bp.route('/update/', methods=['POST']) +def trigger_update(vendor_code: str): + try: + vendor = next((v for v in config.VENDORS if v['code'] == vendor_code), None) + if not vendor: + return api_error(f"Unknown vendor: {vendor_code}", 404) + + logger.info(f"Manual update triggered for {vendor_code}") + success = cve_handler.update_vendor_cache(vendor_code, force=True) + + if success: + 
return api_response({ + 'message': f'Update triggered for {vendor_code}', + 'vendor': vendor['name'], + 'status': 'success', + 'timestamp': datetime.utcnow().isoformat() + }) + else: + return api_error('Update failed', 500) + + except Exception as e: + logger.error(f"Error in trigger_update: {e}", exc_info=True) + return api_error(str(e), 500) + +@api_bp.route('/export//', methods=['GET']) +def export_cves(vendor_code: str, format: str): + if not config.ENABLE_EXPORT: + return api_error("Export is disabled", 403) + + try: + if format not in config.EXPORT_FORMATS: + return api_error(f"Invalid format: {format}. Supported: {', '.join(config.EXPORT_FORMATS)}", 400) + + vendor = next((v for v in config.VENDORS if v['code'] == vendor_code), None) + if not vendor: + return api_error(f"Unknown vendor: {vendor_code}", 404) + + cves = cve_handler.get_vendor_cves( + vendor_code, + limit=config.EXPORT_MAX_ITEMS + ) + + if format == 'json': + return api_response({ + 'vendor': { + 'code': vendor_code, + 'name': vendor['name'] + }, + 'export_date': datetime.utcnow().isoformat(), + 'cve_count': len(cves), + 'cves': cves + }) + + elif format == 'csv': + output = StringIO() + writer = csv.DictWriter(output, fieldnames=[ + 'cve_id', 'severity', 'cvss_score', 'published_date', + 'last_modified', 'description' + ]) + writer.writeheader() + + for cve in cves: + writer.writerow({ + 'cve_id': cve.get('cve_id', ''), + 'severity': cve.get('severity', ''), + 'cvss_score': cve.get('cvss_score', ''), + 'published_date': cve.get('published_date', ''), + 'last_modified': cve.get('last_modified', ''), + 'description': (cve.get('description', '') or '')[:500] + }) + + response = make_response(output.getvalue()) + response.headers['Content-Type'] = 'text/csv; charset=utf-8' + response.headers['Content-Disposition'] = f'attachment; filename={vendor_code}_cves_{datetime.utcnow().strftime("%Y%m%d")}.csv' + return response + + except Exception as e: + logger.error(f"Error in export_cves: {e}", 
exc_info=True) + return api_error(str(e), 500) \ No newline at end of file diff --git a/app.py b/app.py new file mode 100644 index 0000000..8751d63 --- /dev/null +++ b/app.py @@ -0,0 +1,303 @@ +#!/usr/bin/env python3 + +from flask import Flask, render_template, make_response, request, jsonify +import gzip +import hashlib +import logging +from functools import wraps +from datetime import datetime +import threading +import time +import os +from logging.handlers import RotatingFileHandler +from dotenv import load_dotenv +load_dotenv() + +import config +from cve_handler import CVEHandler, update_all_vendors +import api + +def setup_logging(): + log_level = getattr(logging, config.LOG_LEVEL.upper(), logging.INFO) + + formatter = logging.Formatter(config.LOG_FORMAT) + root_logger = logging.getLogger() + root_logger.handlers.clear() + root_logger.setLevel(log_level) + + console_handler = logging.StreamHandler() + console_handler.setFormatter(formatter) + console_handler.setLevel(log_level) + root_logger.addHandler(console_handler) + + if config.LOG_FILE: + os.makedirs(os.path.dirname(config.LOG_FILE), exist_ok=True) + file_handler = RotatingFileHandler( + config.LOG_FILE, + maxBytes=config.LOG_MAX_BYTES, + backupCount=config.LOG_BACKUP_COUNT + ) + file_handler.setFormatter(formatter) + file_handler.setLevel(log_level) + root_logger.addHandler(file_handler) + + logging.getLogger('werkzeug').handlers.clear() + logging.getLogger('werkzeug').setLevel(logging.WARNING if not config.DEBUG else logging.INFO) + logging.getLogger('urllib3').setLevel(logging.WARNING) + logging.getLogger('requests').setLevel(logging.WARNING) + + +setup_logging() +logger = logging.getLogger(__name__) + +app = Flask(__name__) +app.config['JSON_SORT_KEYS'] = False + +if not config.DEBUG: + import logging as flask_logging + flask_logging.getLogger('werkzeug').disabled = True + +app.register_blueprint(api.api_bp, url_prefix='/api') +cve_handler = CVEHandler() + +@app.context_processor +def 
inject_config(): + return {'config': config} + +def add_security_headers(response): + if config.ENABLE_SECURITY_HEADERS: + for header, value in config.SECURITY_HEADERS.items(): + response.headers[header] = value + return response + +def gzip_response(f): + @wraps(f) + def decorated_function(*args, **kwargs): + if not config.ENABLE_COMPRESSION: + return f(*args, **kwargs) + + response = make_response(f(*args, **kwargs)) + + accept_encoding = request.headers.get('Accept-Encoding', '') + + if 'gzip' not in accept_encoding.lower(): + return response + + if response.status_code < 200 or response.status_code >= 300: + return response + + if response.headers.get('Content-Encoding'): + return response + + gzip_buffer = gzip.compress(response.get_data()) + + response.set_data(gzip_buffer) + response.headers['Content-Encoding'] = 'gzip' + response.headers['Content-Length'] = len(gzip_buffer) + response.headers['Vary'] = 'Accept-Encoding' + + return response + return decorated_function + +def etag_support(f): + @wraps(f) + def decorated_function(*args, **kwargs): + if not config.ENABLE_ETAG: + return f(*args, **kwargs) + + response = make_response(f(*args, **kwargs)) + + content = response.get_data() + etag = hashlib.md5(content).hexdigest() + + response.headers['ETag'] = f'"{etag}"' + response.headers['Cache-Control'] = f'public, max-age={config.CACHE_HOURS * 3600}' + + if_none_match = request.headers.get('If-None-Match') + if if_none_match and if_none_match.strip('"') == etag: + return make_response('', 304) + + return response + + return decorated_function + +@app.route('/') +@gzip_response +@etag_support +def index(): + return render_template('index.html', vendors=config.VENDORS) + +@app.route('/health') +def health(): + try: + with cve_handler.get_db_connection() as conn: + cursor = conn.cursor() + cursor.execute("SELECT COUNT(*) as count FROM cve_cache") + cve_count = cursor.fetchone()['count'] + + cursor.execute("SELECT COUNT(*) as count FROM cve_metadata") + 
vendors_count = cursor.fetchone()['count'] + + health_data = { + 'status': 'healthy', + 'timestamp': datetime.utcnow().isoformat(), + 'version': config.APP_VERSION, + 'database': { + 'status': 'connected', + 'cve_count': cve_count, + 'vendors_tracked': vendors_count + }, + 'features': { + 'auto_update': config.ENABLE_AUTO_UPDATE, + 'discord_bot': config.DISCORD_BOT_ENABLED, + 'charts': config.ENABLE_CHARTS, + 'search': config.ENABLE_SEARCH, + 'export': config.ENABLE_EXPORT + } + } + + return jsonify(health_data), 200 + + except Exception as e: + logger.error(f"Health check failed: {e}") + return jsonify({ + 'status': 'unhealthy', + 'timestamp': datetime.utcnow().isoformat(), + 'error': str(e) + }), 503 + +@app.route('/favicon.ico') +def favicon(): + return '', 204 + +@app.route('/version') +def version(): + return jsonify({ + 'app_name': config.APP_NAME, + 'version': config.APP_VERSION, + 'python_version': '3.14', + 'flask_version': '3.0.2' + }) + +@app.errorhandler(404) +def not_found(error): + if request.path.startswith('/api/'): + return jsonify({'error': 'Endpoint not found'}), 404 + return render_template('404.html'), 404 + +@app.errorhandler(500) +def internal_error(error): + logger.error(f"Internal error: {error}") + if request.path.startswith('/api/'): + return jsonify({'error': 'Internal server error'}), 500 + return render_template('500.html'), 500 + +@app.after_request +def after_request(response): + response = add_security_headers(response) + + if config.DEBUG and logger.isEnabledFor(logging.DEBUG): + logger.debug( + f"{request.method} {request.path} - " + f"{response.status_code} - {request.remote_addr}" + ) + + return response + +def background_update_task(): + logger.info("Background update task started") + time.sleep(60) + + while True: + try: + if config.ENABLE_AUTO_UPDATE: + logger.info("Running scheduled CVE update...") + update_all_vendors() + logger.info("Scheduled update completed successfully") + else: + logger.debug("Auto-update disabled, 
skipping") + except Exception as e: + logger.error(f"Error in background update: {e}", exc_info=True) + + sleep_seconds = config.UPDATE_INTERVAL_HOURS * 3600 + logger.info(f"Next update in {config.UPDATE_INTERVAL_HOURS} hours") + time.sleep(sleep_seconds) + + +def start_background_tasks(): + if config.ENABLE_AUTO_UPDATE: + update_thread = threading.Thread( + target=background_update_task, + daemon=True, + name="CVE-Update-Thread" + ) + update_thread.start() + logger.info("Auto-update enabled: background task started") + else: + logger.info("Auto-update disabled") + + if config.DISCORD_BOT_ENABLED: + try: + from discord_bot import start_discord_bot + discord_thread = threading.Thread( + target=start_discord_bot, + daemon=True, + name="Discord-Bot-Thread" + ) + discord_thread.start() + logger.info("Discord bot started") + except ImportError: + logger.warning("discord.py not installed, Discord bot disabled") + except Exception as e: + logger.error(f"Failed to start Discord bot: {e}", exc_info=True) + +def initialize_app(): + logger.info(f"{'='*60}") + logger.info(f"{config.APP_NAME} v{config.APP_VERSION}") + logger.info(f"{'='*60}") + logger.info(f"Environment: {'DEBUG' if config.DEBUG else 'PRODUCTION'}") + logger.info(f"Database: {config.DATABASE_PATH}") + logger.info(f"Update interval: {config.UPDATE_INTERVAL_HOURS}h") + logger.info(f"Features:") + logger.info(f" - Security headers: {config.ENABLE_SECURITY_HEADERS}") + logger.info(f" - Compression: {config.ENABLE_COMPRESSION}") + logger.info(f" - Auto-update: {config.ENABLE_AUTO_UPDATE}") + logger.info(f" - Discord bot: {config.DISCORD_BOT_ENABLED}") + logger.info(f" - Charts: {config.ENABLE_CHARTS}") + logger.info(f" - Search: {config.ENABLE_SEARCH}") + logger.info(f" - Export: {config.ENABLE_EXPORT}") + + if os.path.exists(config.DATABASE_PATH): + try: + with cve_handler.get_db_connection() as conn: + cursor = conn.cursor() + cursor.execute("SELECT COUNT(*) as count FROM cve_cache") + count = 
cursor.fetchone()['count'] + logger.info(f"Database contains {count} CVEs") + except Exception as e: + logger.warning(f"Could not read database: {e}") + else: + logger.info("Database will be created on first update") + + logger.info(f"{'='*60}") + start_background_tasks() + +_initialized = False + +if __name__ == '__main__': + if not _initialized: + initialize_app() + _initialized = True + + logger.info(f"Starting Flask development server on {config.HOST}:{config.PORT}") + app.run( + host=config.HOST, + port=config.PORT, + debug=config.DEBUG, + threaded=True, + use_reloader=False + ) +else: + if not _initialized: + initialize_app() + _initialized = True \ No newline at end of file diff --git a/config.py b/config.py new file mode 100644 index 0000000..ef9bfd6 --- /dev/null +++ b/config.py @@ -0,0 +1,478 @@ +import os +from typing import List, Dict + +try: + from dotenv import load_dotenv + load_dotenv() +except ImportError: + pass + +def get_bool_env(key: str, default: bool = False) -> bool: + return os.getenv(key, str(default)).lower() in ('true', '1', 'yes') + +def get_int_env(key: str, default: int) -> int: + try: + return int(os.getenv(key, str(default))) + except ValueError: + return default + +def _is_docker(): + return os.path.exists('/.dockerenv') or os.path.exists('/run/.containerenv') + +IS_DOCKER = _is_docker() + +# ============================================================ +# VENDORS CONFIGURATION +# ============================================================ +VENDORS: List[Dict[str, any]] = [ + { + 'code': 'microsoft', + 'name': 'Microsoft', + 'cpe_vendor': 'microsoft', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'microsoft', 'windows', 'office', 'azure', 'exchange', 'sharepoint', + 'ms-', 'msft', 'outlook', 'teams', 'edge', 'internet explorer', 'ie', + 'sql server', 'visual studio', 'dotnet', '.net', 'iis', 'hyper-v', + 'active directory', 'powershell', 'windows server', 'defender', + 'onedrive', 'dynamics', 'skype', 'surface', 
'xbox' + ], + 'icon': 'fa-windows' + }, + { + 'code': 'apple', + 'name': 'Apple', + 'cpe_vendor': 'apple', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'apple', 'macos', 'ios', 'ipados', 'safari', 'webkit', + 'iphone', 'ipad', 'mac os', 'watchos', 'tvos', + 'xcode', 'swift', 'darwin', 'core foundation' + ], + 'icon': 'fa-apple' + }, + { + 'code': 'fortinet', + 'name': 'Fortinet', + 'cpe_vendor': 'fortinet', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'fortinet', 'fortigate', 'fortios', 'fortianalyzer', 'fortimanager', + 'fortiweb', 'fortimail', 'fortisandbox', 'forticlient', 'fortiadc', + 'fortiap', 'fortiswitch', 'fortiwan', 'fortiddos', 'fortiextender' + ], + 'icon': 'fa-shield-halved' + }, + { + 'code': 'cisco', + 'name': 'Cisco', + 'cpe_vendor': 'cisco', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'cisco', 'ios', 'nx-os', 'asa', 'webex', 'firepower', + 'ios xe', 'ios xr', 'nexus', 'catalyst', 'meraki', + 'duo', 'umbrella', 'anyconnect', 'jabber', 'telepresence' + ], + 'icon': 'fa-network-wired' + }, + { + 'code': 'oracle', + 'name': 'Oracle', + 'cpe_vendor': 'oracle', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'oracle', 'java', 'jdk', 'jre', 'mysql', 'weblogic', 'solaris', + 'virtualbox', 'glassfish', 'peoplesoft', + 'siebel', 'fusion', 'coherence', 'primavera' + ], + 'icon': 'fa-database' + }, + { + 'code': 'google', + 'name': 'Google', + 'cpe_vendor': 'google', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'google', 'chrome', 'android', 'chromium', 'chromeos', + 'pixel', 'nest', 'workspace', 'cloud platform', 'gcp', + 'firebase', 'tensorflow', 'kubernetes', 'golang' + ], + 'icon': 'fa-google' + }, + { + 'code': 'linux', + 'name': 'Linux Kernel', + 'cpe_vendor': 'linux', + 'cpe_product': 'linux_kernel', + 'use_cpe': True, + 'strict_matching': True, + 'require_cvss': True, + 'keywords': [ + 'in the linux kernel', + 'linux kernel vulnerability' + ], + 'exclude_keywords': [ + 
'android', 'solaredge', 'solar edge', 'inverter', + 'router', 'camera', 'nas device', 'synology', 'qnap', + 'netgear', 'tp-link', 'asus router', 'd-link', 'linksys', + 'firmware update', 'embedded system', 'iot', + 'smart tv', 'television', 'printer' + ], + 'icon': 'fa-linux' + }, + { + 'code': 'vmware', + 'name': 'VMware', + 'cpe_vendor': 'vmware', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'vmware', 'vsphere', 'esxi', 'vcenter', 'workstation', + 'fusion', 'horizon', 'nsx', 'vsan', 'vrealize', + 'tanzu', 'aria', 'carbon black', 'workspace one' + ], + 'icon': 'fa-server' + }, + { + 'code': 'paloalto', + 'name': 'Palo Alto Networks', + 'cpe_vendor': 'paloaltonetworks', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'palo alto', 'pan-os', 'panorama', 'globalprotect', + 'palo alto networks', 'wildfire', 'cortex', 'prisma', + 'expedition', 'traps' + ], + 'icon': 'fa-fire' + }, + { + 'code': 'mikrotik', + 'name': 'MikroTik', + 'cpe_vendor': 'mikrotik', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'mikrotik', 'routeros', 'routerboard', 'winbox', + 'capsman', 'the dude', 'user manager', 'swos', + 'rb', 'crs', 'ccr', 'hap', 'hex', 'groove' + ], + 'icon': 'fa-router' + }, + { + 'code': 'proxmox', + 'name': 'Proxmox', + 'cpe_vendor': 'proxmox', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'proxmox', 'proxmox ve', 'proxmox virtual environment', + 'pve', 'proxmox backup server', 'pbs', 'qemu-server', + 'pve-manager', 'corosync', 'ceph proxmox' + ], + 'icon': 'fa-server' + }, + { + 'code': 'openssl', + 'name': 'OpenSSL', + 'cpe_vendor': 'openssl', + 'cpe_product': 'openssl', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'openssl', 'openssl project', 'libssl', 'libcrypto', + 'openssl library', 'tls implementation', 'ssl library' + ], + 'icon': 'fa-lock' + }, + { + 'code': 'php', + 'name': 'PHP', + 'cpe_vendor': 'php', + 'cpe_product': 'php', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 
'php', 'php group', 'php language', 'php interpreter', + 'php-fpm', 'php core', 'zend engine', 'php runtime' + ], + 'icon': 'fa-code' + }, + { + 'code': 'wordpress', + 'name': 'WordPress', + 'cpe_vendor': 'wordpress', + 'cpe_product': 'wordpress', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'wordpress', 'wordpress core', 'wp-admin', 'wp-content', + 'wordpress cms', 'automattic', 'woocommerce core', + 'wordpress multisite', 'gutenberg' + ], + 'exclude_keywords': [ + 'plugin', 'theme', 'elementor', 'yoast', 'jetpack', + 'contact form 7', 'akismet', 'wordfence' + ], + 'icon': 'fa-wordpress' + }, + { + 'code': 'f5', + 'name': 'F5 Networks', + 'cpe_vendor': 'f5', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'f5 networks', 'big-ip', 'bigip', 'f5 big-ip', 'tmos', + 'nginx plus', 'f5 nginx', 'traffix', 'nginx controller', + 'nginx ingress', 'f5os', 'icontrol', 'asm', 'afm', 'apm' + ], + 'icon': 'fa-network-wired' + }, + { + 'code': 'nginx', + 'name': 'NGINX (OSS)', + 'cpe_vendor': 'f5', + 'cpe_product': 'nginx', + 'use_cpe': True, + 'require_cvss': True, + 'keywords': [ + 'nginx', 'nginx web server', 'nginx http server', + 'nginx open source', 'nginx reverse proxy', + 'nginx inc', 'nginx software' + ], + 'exclude_keywords': [ + 'nginx plus', 'nginx controller', 'f5 nginx' + ], + 'icon': 'fa-server' + }, +] + +# ============================================================ +# API SOURCES CONFIGURATION +# ============================================================ +NVD_API_URL = os.getenv('NVD_API_URL', 'https://services.nvd.nist.gov/rest/json/cves/2.0') +NVD_API_KEY = os.getenv('NVD_API_KEY', '') +NVD_RATE_LIMIT = get_int_env('NVD_RATE_LIMIT', 5) +NVD_TIMEOUT = get_int_env('NVD_TIMEOUT', 30) + +GITHUB_API_URL = os.getenv('GITHUB_API_URL', 'https://api.github.com/advisories') +GITHUB_TOKEN = os.getenv('GITHUB_TOKEN', '') +GITHUB_RATE_LIMIT = get_int_env('GITHUB_RATE_LIMIT', 60) +GITHUB_TIMEOUT = get_int_env('GITHUB_TIMEOUT', 15) + 
+CVE_SOURCES = { + 'nvd': { + 'url': NVD_API_URL, + 'rate_limit': NVD_RATE_LIMIT, + 'timeout': NVD_TIMEOUT + }, + 'github': { + 'url': GITHUB_API_URL, + 'rate_limit': GITHUB_RATE_LIMIT, + 'timeout': GITHUB_TIMEOUT + } +} + +# ============================================================ +# DATABASE CONFIGURATION +# ============================================================ +if IS_DOCKER: + DEFAULT_DB_PATH = '/app/cve_db/cve_cache.db' +else: + DEFAULT_DB_PATH = './cve_db/cve_cache.db' + +DATABASE_PATH = os.getenv('DATABASE_PATH', DEFAULT_DB_PATH) +DATABASE_WAL_MODE = get_bool_env('DATABASE_WAL_MODE', True) +DATABASE_CACHE_SIZE = get_int_env('DATABASE_CACHE_SIZE', 64000) + +# ============================================================ +# CACHE SETTINGS +# ============================================================ +CACHE_HOURS = get_int_env('CACHE_HOURS', 24) +UPDATE_INTERVAL_HOURS = get_int_env('UPDATE_INTERVAL_HOURS', 6) +INITIAL_LOOKBACK_DAYS = get_int_env('INITIAL_LOOKBACK_DAYS', 365) + +# ============================================================ +# APPLICATION SETTINGS +# ============================================================ +DEBUG = get_bool_env('DEBUG', False) +HOST = os.getenv('HOST', '0.0.0.0') +PORT = get_int_env('PORT', 5000) +WORKERS = get_int_env('WORKERS', 4) +WORKER_TIMEOUT = get_int_env('WORKER_TIMEOUT', 120) + +APP_NAME = os.getenv('APP_NAME', 'CVE Monitor') +APP_VERSION = os.getenv('APP_VERSION', '1.0.0') + +# ============================================================ +# SECURITY SETTINGS +# ============================================================ +ENABLE_SECURITY_HEADERS = get_bool_env('ENABLE_SECURITY_HEADERS', not IS_DOCKER) +ENABLE_RATE_LIMITING = get_bool_env('ENABLE_RATE_LIMITING', True) +ENABLE_COMPRESSION = get_bool_env('ENABLE_COMPRESSION', True) +ENABLE_ETAG = get_bool_env('ENABLE_ETAG', True) + +CSP_DEFAULT_SRC = os.getenv('CSP_DEFAULT_SRC', "'self'") +CSP_SCRIPT_SRC = os.getenv('CSP_SCRIPT_SRC', "'self' 'unsafe-inline' 
cdn.jsdelivr.net cdnjs.cloudflare.com") +CSP_STYLE_SRC = os.getenv('CSP_STYLE_SRC', "'self' 'unsafe-inline' cdn.jsdelivr.net cdnjs.cloudflare.com") +CSP_FONT_SRC = os.getenv('CSP_FONT_SRC', "'self' cdnjs.cloudflare.com") +CSP_IMG_SRC = os.getenv('CSP_IMG_SRC', "'self' data:") +CSP_CONNECT_SRC = os.getenv('CSP_CONNECT_SRC', "'self' cdn.jsdelivr.net") + +SECURITY_HEADERS = { + 'Content-Security-Policy': f"default-src {CSP_DEFAULT_SRC}; script-src {CSP_SCRIPT_SRC}; style-src {CSP_STYLE_SRC}; font-src {CSP_FONT_SRC}; img-src {CSP_IMG_SRC}; connect-src {CSP_CONNECT_SRC};", + 'X-Content-Type-Options': 'nosniff', + 'X-Frame-Options': os.getenv('X_FRAME_OPTIONS', 'DENY'), + 'X-XSS-Protection': '1; mode=block', + 'Strict-Transport-Security': f"max-age={get_int_env('HSTS_MAX_AGE', 31536000)}; includeSubDomains" +} + +# ============================================================ +# DISCORD BOT CONFIGURATION +# ============================================================ +ENABLE_DISCORD_BOT = get_bool_env('ENABLE_DISCORD_BOT', False) +DISCORD_BOT_TOKEN = os.getenv('DISCORD_BOT_TOKEN', '') +DISCORD_CHANNEL_ID = os.getenv('DISCORD_CHANNEL_ID', '') +DISCORD_CHECK_INTERVAL_MINUTES = get_int_env('DISCORD_CHECK_INTERVAL_MINUTES', 60) +DISCORD_NOTIFY_CRITICAL = get_bool_env('DISCORD_NOTIFY_CRITICAL', True) +DISCORD_NOTIFY_HIGH = get_bool_env('DISCORD_NOTIFY_HIGH', True) +DISCORD_MIN_CVSS = float(os.getenv('DISCORD_MIN_CVSS', '7.0')) +DISCORD_BOT_ENABLED = ENABLE_DISCORD_BOT +DISCORD_UPDATE_INTERVAL = DISCORD_CHECK_INTERVAL_MINUTES * 60 # Convert to seconds +DISCORD_CVE_LOOKBACK_HOURS = DISCORD_CHECK_INTERVAL_MINUTES / 60 +DISCORD_MIN_SEVERITY = os.getenv('DISCORD_MIN_SEVERITY', 'HIGH') + +# ============================================================ +# VALIDATION +# ============================================================ +def validate_config(): + """Walidacja krytycznych ustawień""" + errors = [] + + if ENABLE_DISCORD_BOT: + if not DISCORD_BOT_TOKEN: + 
errors.append("ENABLE_DISCORD_BOT=True but DISCORD_BOT_TOKEN is empty") + if not DISCORD_CHANNEL_ID: + errors.append("ENABLE_DISCORD_BOT=True but DISCORD_CHANNEL_ID is empty") + try: + int(DISCORD_CHANNEL_ID) + except ValueError: + errors.append("DISCORD_CHANNEL_ID must be a valid integer") + + if CACHE_HOURS < 1: + errors.append("CACHE_HOURS must be at least 1") + + if UPDATE_INTERVAL_HOURS < 1: + errors.append("UPDATE_INTERVAL_HOURS must be at least 1") + + if WORKERS < 1: + errors.append("WORKERS must be at least 1") + + if errors: + raise ValueError("Configuration errors:\n" + "\n".join(errors)) + +# Run validation only if Discord bot enabled +if ENABLE_DISCORD_BOT: + validate_config() + +# ============================================================ +# PAGINATION SETTINGS +# ============================================================ +ITEMS_PER_PAGE = get_int_env('ITEMS_PER_PAGE', 50) +MAX_ITEMS_PER_PAGE = get_int_env('MAX_ITEMS_PER_PAGE', 200) + +# ============================================================ +# EXPORT SETTINGS +# ============================================================ +EXPORT_FORMATS = os.getenv('EXPORT_FORMATS', 'json,csv').split(',') +EXPORT_MAX_ITEMS = get_int_env('EXPORT_MAX_ITEMS', 1000) + +# ============================================================ +# LOGGING CONFIGURATION +# ============================================================ +LOG_LEVEL = os.getenv('LOG_LEVEL', 'DEBUG' if not IS_DOCKER else 'INFO') +LOG_FORMAT = os.getenv('LOG_FORMAT', '%(asctime)s - %(name)s - %(levelname)s - %(message)s') +LOG_FILE = os.getenv('LOG_FILE', '') +LOG_MAX_BYTES = get_int_env('LOG_MAX_BYTES', 10485760) +LOG_BACKUP_COUNT = get_int_env('LOG_BACKUP_COUNT', 5) + +# ============================================================ +# SEVERITY CONFIGURATION +# ============================================================ +SEVERITY_LEVELS = { + 'CRITICAL': { + 'color': 'danger', + 'icon': 'fa-skull-crossbones', + 'min_cvss': 9.0, + 'badge_class': 
def validate_config():
    """Validate critical runtime settings; raise ValueError on any problem.

    NOTE(review): this redefines validate_config() declared earlier in this
    module; Python keeps only this later definition. Unlike the earlier copy,
    the original redefinition dropped the DISCORD_CHANNEL_ID integer check —
    restored here so a non-numeric channel id fails at config time instead of
    at bot startup.

    Raises:
        ValueError: with a newline-separated list of all detected errors.
    """
    errors = []

    # DISCORD_BOT_ENABLED is an alias of ENABLE_DISCORD_BOT defined above.
    if DISCORD_BOT_ENABLED:
        if not DISCORD_BOT_TOKEN:
            errors.append("DISCORD_BOT_ENABLED=True but DISCORD_BOT_TOKEN is empty")
        if not DISCORD_CHANNEL_ID:
            errors.append("DISCORD_BOT_ENABLED=True but DISCORD_CHANNEL_ID is empty")
        else:
            # The bot calls int(DISCORD_CHANNEL_ID) at startup; validate early.
            try:
                int(DISCORD_CHANNEL_ID)
            except ValueError:
                errors.append("DISCORD_CHANNEL_ID must be a valid integer")

    if CACHE_HOURS < 1:
        errors.append("CACHE_HOURS must be at least 1")

    if UPDATE_INTERVAL_HOURS < 1:
        errors.append("UPDATE_INTERVAL_HOURS must be at least 1")

    if WORKERS < 1:
        errors.append("WORKERS must be at least 1")

    if errors:
        raise ValueError("Configuration errors:\n" + "\n".join(errors))

# Run validation at import time so misconfiguration fails fast.
validate_config()
_init_database(self): + with self.get_db_connection() as conn: + cursor = conn.cursor() + + cursor.execute(""" + CREATE TABLE IF NOT EXISTS cve_cache ( + cve_id TEXT PRIMARY KEY, + vendor_code TEXT NOT NULL, + description TEXT, + published_date TEXT, + last_modified TEXT, + cvss_score REAL, + cvss_vector TEXT, + severity TEXT, + refs TEXT, + cwe_ids TEXT, + affected_products TEXT, + raw_data TEXT, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """) + + cursor.execute(""" + CREATE TABLE IF NOT EXISTS cve_metadata ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + vendor_code TEXT UNIQUE NOT NULL, + last_update TIMESTAMP, + total_cve_count INTEGER DEFAULT 0, + last_cve_id TEXT, + update_status TEXT, + error_message TEXT + ) + """) + + cursor.execute("CREATE INDEX IF NOT EXISTS idx_vendor_code ON cve_cache(vendor_code)") + cursor.execute("CREATE INDEX IF NOT EXISTS idx_severity ON cve_cache(severity)") + cursor.execute("CREATE INDEX IF NOT EXISTS idx_published_date ON cve_cache(published_date DESC)") + cursor.execute("CREATE INDEX IF NOT EXISTS idx_cvss_score ON cve_cache(cvss_score DESC)") + cursor.execute("CREATE INDEX IF NOT EXISTS idx_updated_at ON cve_cache(updated_at DESC)") + + logger.info(f"Database initialized at {self.db_path}") + + def fetch_cve_from_nvd(self, keywords: List[str], vendor_config: Dict = None, days_back: int = None) -> List[Dict]: + + if days_back is None: + days_back = config.INITIAL_LOOKBACK_DAYS + + if not vendor_config: + logger.error("vendor_config is required!") + return [] + + cpe_vendor = vendor_config.get('cpe_vendor') + cpe_product = vendor_config.get('cpe_product') + + if not cpe_vendor: + logger.error(f"cpe_vendor is required in vendor_config!") + return [] + + require_cvss = vendor_config.get('require_cvss', True) + min_cvss = vendor_config.get('min_cvss') + max_days_per_chunk = 120 + all_results = [] + unique_cve_ids = set() + num_chunks = (days_back // max_days_per_chunk) + (1 
if days_back % max_days_per_chunk else 0) + + logger.info(f"Fetching CVEs from NVD") + logger.info(f" CPE filter: vendor='{cpe_vendor}'") + if cpe_product: + logger.info(f" product='{cpe_product}'") + if require_cvss: + logger.info(f" CVSS required: yes") + if min_cvss: + logger.info(f" Minimum CVSS: {min_cvss}") + logger.info(f" Status filter: Analyzed/Modified only") + logger.info(f" Platform filter: Ignore OS-only CPE entries") + logger.info(f" Date range: {days_back} days in {num_chunks} chunks") + + for chunk_idx in range(num_chunks): + chunk_end = datetime.utcnow() - timedelta(days=chunk_idx * max_days_per_chunk) + days_in_chunk = min(max_days_per_chunk, days_back - (chunk_idx * max_days_per_chunk)) + chunk_start = chunk_end - timedelta(days=days_in_chunk) + + start_str = chunk_start.strftime('%Y-%m-%dT%H:%M:%S.000') + end_str = chunk_end.strftime('%Y-%m-%dT%H:%M:%S.000') + + logger.info(f"Chunk {chunk_idx+1}/{num_chunks}: {start_str[:10]} to {end_str[:10]}") + + params = { + 'pubStartDate': start_str, + 'pubEndDate': end_str, + 'resultsPerPage': 2000, + 'startIndex': 0 + } + + headers = {} + if config.NVD_API_KEY: + headers['apiKey'] = config.NVD_API_KEY + + page = 0 + while True: + try: + params['startIndex'] = page * 2000 + + response = self.session.get( + config.CVE_SOURCES['nvd']['url'], + params=params, + headers=headers, + timeout=config.CVE_SOURCES['nvd']['timeout'] + ) + + if response.status_code != 200: + if page == 0: + logger.warning(f"Chunk {chunk_idx+1} returned {response.status_code}") + break + + data = response.json() + vulnerabilities = data.get('vulnerabilities', []) + total_results = data.get('totalResults', 0) + results_per_page = data.get('resultsPerPage', 2000) + + if page == 0: + logger.info(f" Total CVEs in chunk: {total_results}") + + if not vulnerabilities: + break + + matched_count = 0 + for vuln in vulnerabilities: + cve_data = vuln.get('cve', {}) + cve_id = cve_data.get('id', '') + + if cve_id in unique_cve_ids: + continue + + 
vuln_status = cve_data.get('vulnStatus', '') + + if vuln_status in ['Rejected', 'Awaiting Analysis', 'Undergoing Analysis']: + continue + + cpe_match_found = False + + configurations = cve_data.get('configurations', []) + + if not configurations: + continue + + for config_item in configurations: + for node in config_item.get('nodes', []): + for cpe_match in node.get('cpeMatch', []): + cpe_uri = cpe_match.get('criteria', '').lower() + + # Format: cpe:2.3:part:vendor:product:version:update:edition:... + cpe_parts = cpe_uri.split(':') + if len(cpe_parts) >= 6: + cpe_part_type = cpe_parts[2] + cpe_vendor_part = cpe_parts[3] + cpe_product_part = cpe_parts[4] + cpe_version_part = cpe_parts[5] + + vendor_match = (cpe_vendor_part == cpe_vendor.lower()) + + if vendor_match: + is_platform_only = False + + if cpe_part_type == 'o': + if cpe_version_part in ['-', '*', 'any']: + is_platform_only = True + + platform_products = [ + 'windows', 'windows_server', 'windows_10', 'windows_11', + 'macos', 'mac_os_x', 'ios', 'ipados', 'tvos', 'watchos', + 'linux', 'linux_kernel', + 'android', 'chrome_os', + 'ubuntu', 'debian', 'centos', 'rhel', 'fedora', + 'freebsd', 'netbsd', 'openbsd' + ] + + if cpe_product_part in platform_products: + if cpe_version_part in ['-', '*', 'any']: + is_platform_only = True + + if is_platform_only: + continue + + if cpe_product: + product_match = (cpe_product_part == cpe_product.lower()) + if product_match: + cpe_match_found = True + break + else: + cpe_match_found = True + break + + if cpe_match_found: + break + + if cpe_match_found: + break + + if not cpe_match_found: + continue + + cvss_score = None + cvss_vector = None + severity = 'UNKNOWN' + + metrics = cve_data.get('metrics', {}) + + for version, key in [('4.0', 'cvssMetricV40'), + ('3.1', 'cvssMetricV31'), + ('3.0', 'cvssMetricV30'), + ('2.0', 'cvssMetricV2')]: + if key in metrics and metrics[key]: + cvss_data = metrics[key][0].get('cvssData', {}) + cvss_score = cvss_data.get('baseScore') + 
cvss_vector = cvss_data.get('vectorString') + severity = cvss_data.get('baseSeverity', 'UNKNOWN') + + if (not severity or severity == 'UNKNOWN') and cvss_score: + if cvss_score >= 9.0: + severity = 'CRITICAL' + elif cvss_score >= 7.0: + severity = 'HIGH' + elif cvss_score >= 4.0: + severity = 'MEDIUM' + elif cvss_score > 0: + severity = 'LOW' + break + + + if require_cvss and not cvss_score: + continue + + if min_cvss and cvss_score and cvss_score < min_cvss: + continue + + matched_count += 1 + unique_cve_ids.add(cve_id) + descriptions = cve_data.get('descriptions', []) + description_text = ' '.join([desc.get('value', '') for desc in descriptions]) + refs = [ref.get('url', '') for ref in cve_data.get('references', [])] + cwe_ids = [] + for weakness in cve_data.get('weaknesses', []): + for desc in weakness.get('description', []): + value = desc.get('value', '') + if value.startswith('CWE-'): + cwe_ids.append(value) + + all_results.append({ + 'cve_id': cve_id, + 'description': description_text[:2000], + 'published_date': cve_data.get('published', ''), + 'last_modified': cve_data.get('lastModified', ''), + 'cvss_score': cvss_score, + 'cvss_vector': cvss_vector, + 'severity': severity, + 'references': json.dumps(refs), + 'cwe_ids': json.dumps(cwe_ids), + 'raw_data': json.dumps(cve_data) + }) + + if matched_count > 0: + logger.info(f" Page {page+1}: Matched {matched_count} CVEs") + + if len(vulnerabilities) < results_per_page or (page + 1) * results_per_page >= total_results: + break + + page += 1 + + if not config.NVD_API_KEY: + time.sleep(6) + else: + time.sleep(0.6) + + except Exception as e: + logger.error(f"Error in chunk {chunk_idx+1} page {page+1}: {e}", exc_info=True) + break + + if chunk_idx < num_chunks - 1: + delay = 6 if not config.NVD_API_KEY else 1 + time.sleep(delay) + + logger.info(f"✓ Total unique CVEs matched: {len(all_results)} (CPE + CVSS + Status + Platform filters)") + return all_results + + def fetch_cve_from_github(self, keywords: List[str]) -> 
List[Dict]: + results = [] + unique_cve_ids = set() + + headers = {} + if config.GITHUB_TOKEN: + headers['Authorization'] = f'token {config.GITHUB_TOKEN}' + + logger.info(f"Fetching advisories from GitHub for keywords: {keywords[:5]}") + + page = 1 + max_pages = 10 + + while page <= max_pages: + params = { + 'per_page': 100, + 'page': page, + 'sort': 'published', + 'direction': 'desc' + } + + try: + response = self.session.get( + config.CVE_SOURCES['github']['url'], + params=params, + headers=headers, + timeout=config.CVE_SOURCES['github']['timeout'] + ) + response.raise_for_status() + + advisories = response.json() + + if not advisories: + logger.info(f"GitHub page {page}: No more results") + break + + logger.info(f"GitHub page {page}: Got {len(advisories)} advisories") + + matched_count = 0 + for advisory in advisories: + summary = advisory.get('summary', '') + description = advisory.get('description', '') + full_text = (summary + ' ' + description).lower() + + if not any(keyword.lower() in full_text for keyword in keywords): + continue + + cve_id = None + for identifier in advisory.get('identifiers', []): + if identifier.get('type') == 'CVE': + cve_id = identifier.get('value') + break + + if not cve_id: + cve_id = advisory.get('ghsa_id', f"GHSA-{advisory.get('id', 'unknown')}") + + if cve_id in unique_cve_ids: + continue + + unique_cve_ids.add(cve_id) + matched_count += 1 + + cvss_data = advisory.get('cvss', {}) + cvss_score = cvss_data.get('score') + cvss_vector = cvss_data.get('vector_string') + severity = advisory.get('severity', 'UNKNOWN').upper() + cwe_list = [cwe.get('cwe_id', '') for cwe in advisory.get('cwes', [])] + + results.append({ + 'cve_id': cve_id, + 'description': (summary + ' ' + description)[:1000], + 'published_date': advisory.get('published_at', ''), + 'last_modified': advisory.get('updated_at', ''), + 'cvss_score': cvss_score, + 'cvss_vector': cvss_vector, + 'severity': severity, + 'references': json.dumps([advisory.get('html_url', '')]), + 
'cwe_ids': json.dumps(cwe_list), + 'raw_data': json.dumps(advisory) + }) + + logger.info(f"GitHub page {page}: Matched {matched_count} advisories") + + page += 1 + time.sleep(1) + + except Exception as e: + logger.error(f"Error fetching GitHub page {page}: {e}", exc_info=True) + break + + logger.info(f"✓ Total unique CVEs from GitHub: {len(results)}") + return results + + def update_vendor_cache(self, vendor_code: str, force: bool = False) -> bool: + vendor = next((v for v in config.VENDORS if v['code'] == vendor_code), None) + if not vendor: + logger.error(f"Unknown vendor code: {vendor_code}") + logger.error(f"Available vendors: {[v['code'] for v in config.VENDORS]}") + return False + + if not force: + with self.get_db_connection() as conn: + cursor = conn.cursor() + cursor.execute( + "SELECT last_update FROM cve_metadata WHERE vendor_code = ?", + (vendor_code,) + ) + row = cursor.fetchone() + if row and row['last_update']: + last_update = datetime.fromisoformat(row['last_update']) + time_since_update = datetime.utcnow() - last_update + cache_valid_hours = config.UPDATE_INTERVAL_HOURS + + if time_since_update < timedelta(hours=cache_valid_hours): + logger.info(f"Cache for {vendor_code} is fresh (updated {time_since_update.seconds//3600}h ago)") + return True + + logger.info(f"Updating cache for vendor: {vendor['name']} ({vendor_code})") + + if vendor.get('cpe_vendor'): + logger.info(f"CPE vendor: {vendor['cpe_vendor']}") + if vendor.get('cpe_product'): + logger.info(f"CPE product: {vendor['cpe_product']}") + else: + logger.error(f"No CPE vendor configured for {vendor_code}!") + return False + + try: + all_cves = [] + days_back = config.INITIAL_LOOKBACK_DAYS if force else config.UPDATE_LOOKBACK_DAYS + + nvd_cves = self.fetch_cve_from_nvd( + vendor['keywords'], + vendor_config=vendor, + days_back=days_back + ) + all_cves.extend(nvd_cves) + logger.info(f"Collected {len(nvd_cves)} CVEs from NVD") + + if config.CVE_SOURCES.get('github', {}).get('enabled', False): + 
github_cves = self.fetch_cve_from_github(vendor['keywords']) + all_cves.extend(github_cves) + logger.info(f"Collected {len(github_cves)} CVEs from GitHub") + + unique_cves = {cve['cve_id']: cve for cve in all_cves} + + logger.info(f"Total unique CVEs after deduplication: {len(unique_cves)}") + + with self.get_db_connection() as conn: + cursor = conn.cursor() + + for cve_id, cve in unique_cves.items(): + cursor.execute(""" + INSERT OR REPLACE INTO cve_cache + (cve_id, vendor_code, description, published_date, last_modified, + cvss_score, cvss_vector, severity, refs, cwe_ids, + affected_products, raw_data, updated_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP) + """, ( + cve_id, + vendor_code, + cve.get('description'), + cve.get('published_date'), + cve.get('last_modified'), + cve.get('cvss_score'), + cve.get('cvss_vector'), + cve.get('severity'), + cve.get('references'), + cve.get('cwe_ids'), + None, + cve.get('raw_data') + )) + + cursor.execute(""" + INSERT OR REPLACE INTO cve_metadata + (vendor_code, last_update, total_cve_count, last_cve_id, update_status, error_message) + VALUES (?, CURRENT_TIMESTAMP, ?, ?, 'success', NULL) + """, ( + vendor_code, + len(unique_cves), + list(unique_cves.keys())[0] if unique_cves else None + )) + + logger.info(f"✓ Successfully updated {len(unique_cves)} CVEs for {vendor['name']}") + return True + + except Exception as e: + logger.error(f"✗ Error updating vendor cache for {vendor_code}: {e}", exc_info=True) + + try: + with self.get_db_connection() as conn: + cursor = conn.cursor() + cursor.execute(""" + INSERT OR REPLACE INTO cve_metadata + (vendor_code, last_update, update_status, error_message) + VALUES (?, CURRENT_TIMESTAMP, 'failed', ?) 
+ """, (vendor_code, str(e)[:500])) + except: + pass + + return False + + def get_vendor_cves(self, vendor_code: str, limit: int = None, offset: int = 0, + severity: str = None, year: int = None) -> List[Dict]: + with self.get_db_connection() as conn: + cursor = conn.cursor() + + query = """ + SELECT cve_id, vendor_code, description, published_date, last_modified, + cvss_score, cvss_vector, severity, refs, cwe_ids, updated_at + FROM cve_cache + WHERE vendor_code = ? + """ + params = [vendor_code] + + if severity: + query += " AND severity = ?" + params.append(severity.upper()) + + if year: + query += " AND strftime('%Y', published_date) = ?" + params.append(str(year)) + + query += " ORDER BY published_date DESC" + + if limit: + query += f" LIMIT {limit} OFFSET {offset}" + + cursor.execute(query, params) + + results = [] + for row in cursor.fetchall(): + row_dict = dict(row) + if 'refs' in row_dict: + row_dict['references'] = row_dict.pop('refs') + results.append(row_dict) + + return results + + def get_vendor_stats(self, vendor_code: str) -> Dict: + with self.get_db_connection() as conn: + cursor = conn.cursor() + + # Total count + cursor.execute( + "SELECT COUNT(*) as total FROM cve_cache WHERE vendor_code = ?", + (vendor_code,) + ) + total = cursor.fetchone()['total'] + + # Severity distribution + cursor.execute(""" + SELECT severity, COUNT(*) as count + FROM cve_cache + WHERE vendor_code = ? + GROUP BY severity + """, (vendor_code,)) + severity_counts = {row['severity']: row['count'] for row in cursor.fetchall()} + + # Monthly trend (last 12 months) + cursor.execute(""" + SELECT strftime('%Y-%m', published_date) as month, COUNT(*) as count + FROM cve_cache + WHERE vendor_code = ? AND published_date >= date('now', '-12 months') + GROUP BY month + ORDER BY month + """, (vendor_code,)) + monthly_counts = {row['month']: row['count'] for row in cursor.fetchall()} + + # This month + cursor.execute(""" + SELECT COUNT(*) as count + FROM cve_cache + WHERE vendor_code = ? 
AND strftime('%Y-%m', published_date) = strftime('%Y-%m', 'now') + """, (vendor_code,)) + this_month = cursor.fetchone()['count'] + + # This year + cursor.execute(""" + SELECT COUNT(*) as count + FROM cve_cache + WHERE vendor_code = ? AND strftime('%Y', published_date) = strftime('%Y', 'now') + """, (vendor_code,)) + this_year = cursor.fetchone()['count'] + + # Recent (last 7 days) + cursor.execute(""" + SELECT COUNT(*) as count + FROM cve_cache + WHERE vendor_code = ? AND published_date >= date('now', '-7 days') + """, (vendor_code,)) + recent = cursor.fetchone()['count'] + + return { + 'total': total, + 'severity': severity_counts, + 'monthly': monthly_counts, + 'this_month': this_month, + 'this_year': this_year, + 'recent': recent + } + + def search_cves(self, query: str, limit: int = 50) -> List[Dict]: + with self.get_db_connection() as conn: + cursor = conn.cursor() + search_query = f"%{query}%" + + cursor.execute(""" + SELECT cve_id, vendor_code, description, published_date, cvss_score, severity + FROM cve_cache + WHERE cve_id LIKE ? OR description LIKE ? + ORDER BY published_date DESC + LIMIT ? + """, (search_query, search_query, limit)) + + return [dict(row) for row in cursor.fetchall()] + + def get_all_vendors_summary(self) -> List[Dict]: + summary = [] + + with self.get_db_connection() as conn: + cursor = conn.cursor() + + for vendor in config.VENDORS: + cursor.execute(""" + SELECT + COUNT(*) as total, + SUM(CASE WHEN severity = 'CRITICAL' THEN 1 ELSE 0 END) as critical, + SUM(CASE WHEN severity = 'HIGH' THEN 1 ELSE 0 END) as high, + SUM(CASE WHEN published_date >= date('now', '-7 days') THEN 1 ELSE 0 END) as recent + FROM cve_cache + WHERE vendor_code = ? 
+ """, (vendor['code'],)) + stats = cursor.fetchone() + + cursor.execute( + "SELECT last_update FROM cve_metadata WHERE vendor_code = ?", + (vendor['code'],) + ) + metadata = cursor.fetchone() + + summary.append({ + 'code': vendor['code'], + 'name': vendor['name'], + 'icon': vendor.get('icon', 'fa-shield-alt'), + 'total': stats['total'] or 0, + 'critical': stats['critical'] or 0, + 'high': stats['high'] or 0, + 'recent': stats['recent'] or 0, + 'last_update': metadata['last_update'] if metadata else None + }) + return summary + + def get_recent_cves_for_discord(self, hours: int = 1) -> List[Dict]: + with self.get_db_connection() as conn: + cursor = conn.cursor() + + cursor.execute(""" + SELECT cve_id, vendor_code, description, published_date, cvss_score, severity + FROM cve_cache + WHERE updated_at >= datetime('now', ? || ' hours') + AND severity IN ('CRITICAL', 'HIGH') + AND cvss_score >= ? + ORDER BY published_date DESC + LIMIT 10 + """, (f'-{hours}', config.DISCORD_MIN_CVSS)) + + return [dict(row) for row in cursor.fetchall()] + +def update_all_vendors(force: bool = True): + handler = CVEHandler() + updated = 0 + failed = 0 + + total_vendors = len(config.VENDORS) + logger.info(f"{'='*60}") + logger.info(f"Starting {'FULL' if force else 'incremental'} update for {total_vendors} vendors") + logger.info(f"Lookback period: {config.INITIAL_LOOKBACK_DAYS} days") + logger.info(f"{'='*60}") + + start_time = time.time() + + for idx, vendor in enumerate(config.VENDORS, 1): + vendor_start = time.time() + + logger.info(f"\n[{idx}/{total_vendors}] {'='*50}") + logger.info(f"Updating {vendor['name']} ({vendor['code']})...") + logger.info(f"Keywords: {vendor['keywords'][:5]}...") + + try: + if handler.update_vendor_cache(vendor['code'], force=force): + updated += 1 + stats = handler.get_vendor_stats(vendor['code']) + logger.info(f"✓ {vendor['name']} updated successfully") + logger.info(f" Total CVEs: {stats['total']}") + logger.info(f" Critical: 
{stats['severity'].get('CRITICAL', 0)}") + logger.info(f" High: {stats['severity'].get('HIGH', 0)}") + logger.info(f" Medium: {stats['severity'].get('MEDIUM', 0)}") + logger.info(f" Low: {stats['severity'].get('LOW', 0)}") + else: + failed += 1 + logger.error(f"✗ {vendor['name']} update failed") + + vendor_time = time.time() - vendor_start + logger.info(f" Time taken: {vendor_time:.1f}s") + + if idx < total_vendors: + if not config.NVD_API_KEY: + delay = 10 + logger.info(f"Waiting {delay}s before next vendor (no API key)...") + else: + delay = 2 + logger.debug(f"Waiting {delay}s before next vendor...") + time.sleep(delay) + + except KeyboardInterrupt: + logger.warning("Update interrupted by user") + break + except Exception as e: + failed += 1 + logger.error(f"✗ Exception updating {vendor['name']}: {e}", exc_info=True) + + total_time = time.time() - start_time + logger.info(f"\n{'='*60}") + logger.info(f"Update completed in {total_time/60:.1f} minutes") + logger.info(f"Results: {updated} successful, {failed} failed") + logger.info(f"{'='*60}") + + logger.info("\nFinal summary:") + summary = handler.get_all_vendors_summary() + for v in summary: + logger.info(f" {v['name']:25s} - Total: {v['total']:5d} | Critical: {v['critical']:4d} | High: {v['high']:4d}") + + return updated, failed + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + handler = CVEHandler() + print(f"Database: {config.DATABASE_PATH}") + print(f"Available vendors: {[v['code'] for v in config.VENDORS]}") + + \ No newline at end of file diff --git a/discord_bot.py b/discord_bot.py new file mode 100644 index 0000000..f3c41ae --- /dev/null +++ b/discord_bot.py @@ -0,0 +1,218 @@ +#!/usr/bin/env python3 + +import discord +from discord.ext import tasks +import logging +import asyncio +from datetime import datetime +import sys + +import config +from cve_handler import CVEHandler + +logging.basicConfig(level=config.LOG_LEVEL, format=config.LOG_FORMAT) +logger = logging.getLogger(__name__) 
class CVEDiscordBot(discord.Client):
    """Discord client that polls the local CVE database on a fixed interval
    and posts embed notifications for new CVEs that meet the configured
    minimum CVSS score to a single configured channel.

    Exits the process (sys.exit(1)) from __init__ when the bot token or
    channel id is missing/invalid, so misconfiguration fails fast at startup.
    """

    def __init__(self, *args, **kwargs):
        intents = discord.Intents.default()
        intents.message_content = True
        super().__init__(intents=intents, *args, **kwargs)

        self.cve_handler = CVEHandler()
        self.channel_id = None
        self.check_interval = config.DISCORD_CHECK_INTERVAL_MINUTES

        # Fail fast on missing/invalid configuration.
        if not config.DISCORD_BOT_TOKEN:
            logger.error("DISCORD_BOT_TOKEN not configured in .env")
            sys.exit(1)

        if not config.DISCORD_CHANNEL_ID:
            logger.error("DISCORD_CHANNEL_ID not configured in .env")
            sys.exit(1)

        try:
            self.channel_id = int(config.DISCORD_CHANNEL_ID)
        except ValueError:
            logger.error(f"Invalid DISCORD_CHANNEL_ID: {config.DISCORD_CHANNEL_ID}")
            sys.exit(1)

    async def on_ready(self):
        """Log connection info, verify channel access, and start the polling task."""
        logger.info(f'Discord bot logged in as {self.user}')
        logger.info(f'Bot ID: {self.user.id}')
        logger.info(f'Monitoring channel ID: {self.channel_id}')
        logger.info(f'Check interval: {self.check_interval} minutes')
        logger.info(f'Min CVSS score: {config.DISCORD_MIN_CVSS}')

        channel = self.get_channel(self.channel_id)
        if not channel:
            logger.error(f"Cannot access channel {self.channel_id}")
            logger.error("Make sure the bot has been invited to the server and has permissions")
            await self.close()
            return

        logger.info(f"Successfully connected to channel: #{channel.name}")

        # on_ready may fire more than once (reconnects) -- guard the task start.
        if not self.check_new_cves.is_running():
            self.check_new_cves.start()
            logger.info("CVE monitoring task started")

    async def on_error(self, event, *args, **kwargs):
        """Log any unhandled exception raised inside a Discord event handler."""
        logger.error(f"Discord error in event {event}", exc_info=True)

    @tasks.loop(minutes=config.DISCORD_CHECK_INTERVAL_MINUTES)
    async def check_new_cves(self):
        """Fetch CVEs newer than one polling interval, filter by CVSS, post embeds."""
        try:
            logger.info("Checking for new CVEs...")

            channel = self.get_channel(self.channel_id)
            if not channel:
                logger.error(f"Channel {self.channel_id} not found")
                return

            # Look back exactly one polling interval (minutes -> hours).
            hours_back = self.check_interval / 60
            new_cves = self.cve_handler.get_recent_cves_for_discord(hours=hours_back)

            if not new_cves:
                logger.info("No new CVEs found")
                return

            filtered_cves = [
                cve for cve in new_cves
                if cve.get('cvss_score', 0) >= config.DISCORD_MIN_CVSS
            ]

            if not filtered_cves:
                logger.info(f"Found {len(new_cves)} CVEs but none meet min CVSS threshold of {config.DISCORD_MIN_CVSS}")
                return

            logger.info(f"Found {len(filtered_cves)} CVEs meeting criteria (out of {len(new_cves)} total)")

            for cve in filtered_cves:
                try:
                    embed = self.create_cve_embed(cve)
                    await channel.send(embed=embed)
                    logger.info(f"Sent notification for {cve['cve_id']}")
                    # Stay well under Discord's per-channel rate limit.
                    await asyncio.sleep(1)
                except Exception as e:
                    logger.error(f"Error sending notification for {cve['cve_id']}: {e}")

            logger.info(f"Successfully sent {len(filtered_cves)} CVE notifications")

        except Exception as e:
            logger.error(f"Error in check_new_cves: {e}", exc_info=True)

    @check_new_cves.before_loop
    async def before_check_new_cves(self):
        """Delay the first poll until the gateway connection is fully ready."""
        await self.wait_until_ready()
        logger.info("Bot is ready, starting CVE monitoring...")

    def create_cve_embed(self, cve: dict) -> discord.Embed:
        """Build a severity-colored Discord embed for one CVE record.

        ``cve`` is a dict row from the local database; expected keys include
        cve_id, description, severity, cvss_score, vendor_code, published_date
        (presumably ISO-8601 -- unparseable values fall back to the raw text).
        """
        # Severity colors
        severity_colors = {
            'CRITICAL': 0xDC3545,  # Red
            'HIGH': 0xFD7E14,      # Orange
            'MEDIUM': 0xFFC107,    # Yellow
            'LOW': 0x6C757D        # Gray
        }

        severity = cve.get('severity', 'UNKNOWN')
        color = severity_colors.get(severity, 0x6C757D)

        # Resolve the vendor display name from the configured vendor list.
        vendor_code = cve.get('vendor_code', '')
        vendor = next((v for v in config.VENDORS if v['code'] == vendor_code), None)
        vendor_name = vendor['name'] if vendor else vendor_code.title()

        # discord.utils.utcnow() returns a timezone-aware datetime;
        # datetime.utcnow() is naive and deprecated since Python 3.12.
        embed = discord.Embed(
            title=f"🚨 {cve['cve_id']}",
            description=cve.get('description', 'No description available')[:2000],
            color=color,
            timestamp=discord.utils.utcnow()
        )

        embed.add_field(
            name="🏢 Vendor",
            value=vendor_name,
            inline=True
        )

        embed.add_field(
            name="⚠️ Severity",
            value=severity,
            inline=True
        )

        cvss_score = cve.get('cvss_score')
        embed.add_field(
            name="📊 CVSS Score",
            # Compare against None so a legitimate score of 0.0 is not shown as N/A.
            value=f"**{cvss_score:.1f}**" if cvss_score is not None else "N/A",
            inline=True
        )

        published = cve.get('published_date', '')
        if published:
            try:
                pub_date = datetime.fromisoformat(published.replace('Z', '+00:00'))
                embed.add_field(
                    name="📅 Published",
                    value=pub_date.strftime('%Y-%m-%d %H:%M UTC'),
                    inline=True
                )
            except ValueError:
                # Unparseable timestamp: fall back to the raw date portion.
                embed.add_field(name="📅 Published", value=published[:10], inline=True)

        nvd_url = f"https://nvd.nist.gov/vuln/detail/{cve['cve_id']}"
        embed.add_field(
            name="🔗 Links",
            value=f"[View on NVD]({nvd_url})",
            inline=False
        )

        embed.set_footer(
            text=f"CVE Monitor • {vendor_name}",
            icon_url="https://nvd.nist.gov/favicon.ico"
        )

        return embed


def start_discord_bot():
    """Validate Discord configuration and run the bot (blocking call)."""
    if not config.ENABLE_DISCORD_BOT:
        logger.info("Discord bot is disabled (ENABLE_DISCORD_BOT=False)")
        return

    if not config.DISCORD_BOT_TOKEN:
        logger.error("DISCORD_BOT_TOKEN not configured")
        logger.error("Please set DISCORD_BOT_TOKEN in .env file")
        logger.error("Get your token at: https://discord.com/developers/applications")
        return

    if not config.DISCORD_CHANNEL_ID:
        logger.error("DISCORD_CHANNEL_ID not configured")
        logger.error("Please set DISCORD_CHANNEL_ID in .env file")
        logger.error("Enable Developer Mode in Discord, right-click channel -> Copy ID")
        return

    logger.info("Starting Discord bot...")
    logger.info(f"Configuration:")
    logger.info(f" - Check interval: {config.DISCORD_CHECK_INTERVAL_MINUTES} minutes")
    logger.info(f" - Min CVSS score: {config.DISCORD_MIN_CVSS}")
    logger.info(f" - Notify on CRITICAL: {config.DISCORD_NOTIFY_CRITICAL}")
    logger.info(f" - Notify on HIGH: {config.DISCORD_NOTIFY_HIGH}")

    bot = CVEDiscordBot()

    try:
        bot.run(config.DISCORD_BOT_TOKEN)
    except discord.LoginFailure:
        logger.error("Invalid Discord bot token")
        logger.error("Please check DISCORD_BOT_TOKEN in .env file")
    except Exception as e:
        logger.error(f"Failed to start Discord bot: {e}", exc_info=True)


if __name__ == '__main__':
    start_discord_bot()
'__main__': + start_discord_bot() diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..bb7cf58 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,87 @@ +services: + cve-monitor: + build: . + container_name: cve-monitor + restart: unless-stopped + ports: + - "${PORT:-5000}:5000" + volumes: + - ./cve_db:/app/cve_db + - ./logs:/app/logs + environment: + - WORKERS=${WORKERS:-4} + - WORKER_TIMEOUT=${WORKER_TIMEOUT:-120} + - PORT=5000 + - DATABASE_PATH=/app/cve_db/cve_cache.db + - LOG_LEVEL=${LOG_LEVEL:-INFO} + - NVD_API_KEY=${NVD_API_KEY:-} + - GITHUB_TOKEN=${GITHUB_TOKEN:-} + env_file: + - .env + command: app + networks: + - cve-network + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:5000/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + + cve-monitor-scheduler: + build: . + container_name: cve-monitor-scheduler + restart: unless-stopped + volumes: + - ./cve_db:/app/cve_db + - ./logs:/app/logs + environment: + - DATABASE_PATH=/app/cve_db/cve_cache.db + - LOG_LEVEL=${LOG_LEVEL:-INFO} + - ENABLE_AUTO_UPDATE=${ENABLE_AUTO_UPDATE:-True} + - UPDATE_INTERVAL_HOURS=${UPDATE_INTERVAL_HOURS:-24} + - INITIAL_LOOKBACK_DAYS=${INITIAL_LOOKBACK_DAYS:-365} + - NVD_API_KEY=${NVD_API_KEY:-} + - GITHUB_TOKEN=${GITHUB_TOKEN:-} + env_file: + - .env + command: scheduler + depends_on: + - cve-monitor + networks: + - cve-network + healthcheck: + test: ["CMD", "test", "-f", "/app/cve_db/cve_cache.db"] + interval: 15s + timeout: 5s + retries: 10 + start_period: 60s + + cve-monitor-discord: + build: . 
+ container_name: cve-monitor-discord + restart: unless-stopped + volumes: + - ./cve_db:/app/cve_db + - ./logs:/app/logs + environment: + - DATABASE_PATH=/app/cve_db/cve_cache.db + - LOG_LEVEL=${LOG_LEVEL:-INFO} + - DISCORD_BOT_TOKEN=${DISCORD_BOT_TOKEN} + - DISCORD_CHANNEL_ID=${DISCORD_CHANNEL_ID} + - DISCORD_CHECK_INTERVAL=${DISCORD_CHECK_INTERVAL:-60} + - DISCORD_MIN_CVSS=${DISCORD_MIN_CVSS:-7.0} + - DISCORD_NOTIFY_CRITICAL=${DISCORD_NOTIFY_CRITICAL:-True} + - DISCORD_NOTIFY_HIGH=${DISCORD_NOTIFY_HIGH:-True} + env_file: + - .env + command: discord + depends_on: + cve-monitor-scheduler: + condition: service_healthy + networks: + - cve-network + +networks: + cve-network: + driver: bridge diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh new file mode 100644 index 0000000..7440027 --- /dev/null +++ b/docker-entrypoint.sh @@ -0,0 +1,61 @@ +#!/bin/sh +set -e + +mkdir -p /app/cve_db /app/logs +chmod -R 777 /app/cve_db /app/logs + +WORKERS=${WORKERS:-4} +WORKER_TIMEOUT=${WORKER_TIMEOUT:-120} +PORT=${PORT:-5000} + +echo "======================================" +echo "CVE Monitor Docker Container" +echo "======================================" + +case "$1" in + app) + echo "Starting Flask app with Gunicorn..." + echo " Workers: $WORKERS" + echo " Timeout: $WORKER_TIMEOUT" + echo " Port: $PORT" + echo "======================================" + exec gunicorn \ + --bind 0.0.0.0:$PORT \ + --workers $WORKERS \ + --timeout $WORKER_TIMEOUT \ + --access-logfile - \ + --error-logfile - \ + --log-level info \ + app:app + ;; + + scheduler) + echo "Starting CVE Scheduler..." + echo "======================================" + exec python scheduler.py + ;; + + discord) + echo "Starting Discord Bot..." + echo "======================================" + exec python discord_bot.py + ;; + + update) + echo "Running manual CVE update..." 
+ echo "======================================" + exec python -c "from cve_handler import update_all_vendors; update_all_vendors(force=True)" + ;; + + shell) + echo "Starting interactive shell..." + echo "======================================" + exec /bin/sh + ;; + + *) + echo "Executing custom command: $@" + echo "======================================" + exec "$@" + ;; +esac diff --git a/env.example b/env.example new file mode 100644 index 0000000..42d8f88 --- /dev/null +++ b/env.example @@ -0,0 +1,142 @@ +# ============================================================ +# CVE MONITOR - CONFIGURATION FILE +# ============================================================ +# Copy this file to .env and adjust values for your environment + +# ============================================================ +# APPLICATION SETTINGS +# ============================================================ +APP_NAME=CVE Monitor +APP_VERSION=1.0.0 +DEBUG=False +HOST=0.0.0.0 +PORT=5000 + +# ============================================================ +# DATABASE CONFIGURATION +# ============================================================ +DATABASE_PATH=./cve_db/cve_cache.db +DATABASE_WAL_MODE=True +DATABASE_CACHE_SIZE=10000 + +# ============================================================ +# LOGGING CONFIGURATION +# ============================================================ +LOG_LEVEL=INFO +LOG_FORMAT=%(asctime)s - %(name)s - %(levelname)s - %(message)s +LOG_FILE=./logs/cve_monitor.log +LOG_MAX_BYTES=10485760 +LOG_BACKUP_COUNT=5 + +# ============================================================ +# AUTO-UPDATE CONFIGURATION +# ============================================================ +ENABLE_AUTO_UPDATE=True +UPDATE_INTERVAL_HOURS=1 +INITIAL_LOOKBACK_DAYS=365 +CACHE_HOURS=24 + +# ============================================================ +# EXTERNAL API KEYS (Optional but Recommended) +# ============================================================ +# NVD API Key - Get yours at: 
https://nvd.nist.gov/developers/request-an-api-key +# Without API key: 5 requests per 30 seconds +# With API key: 50 requests per 30 seconds +NVD_API_KEY= + +# GitHub Personal Access Token - Get yours at: https://github.com/settings/tokens +# Increases rate limit from 60 to 5000 requests per hour +GITHUB_TOKEN= + +# ============================================================ +# API ENDPOINTS (Advanced - Don't change unless necessary) +# ============================================================ +NVD_API_URL=https://services.nvd.nist.gov/rest/json/cves/2.0 +GITHUB_API_URL=https://api.github.com/advisories +NVD_TIMEOUT=30 +GITHUB_TIMEOUT=15 + +# ============================================================ +# GUNICORN CONFIGURATION (Production) +# ============================================================ +WORKERS=4 +WORKER_TIMEOUT=120 + +# ============================================================ +# SECURITY SETTINGS +# ============================================================ +ENABLE_SECURITY_HEADERS=True +ENABLE_RATE_LIMITING=True +ENABLE_COMPRESSION=True +ENABLE_ETAG=True + +# Content Security Policy +CSP_DEFAULT_SRC='self' +CSP_SCRIPT_SRC='self' 'unsafe-inline' cdn.jsdelivr.net cdnjs.cloudflare.com +CSP_STYLE_SRC='self' 'unsafe-inline' cdn.jsdelivr.net cdnjs.cloudflare.com +CSP_FONT_SRC='self' cdnjs.cloudflare.com +CSP_IMG_SRC='self' data: +CSP_CONNECT_SRC='self' cdn.jsdelivr.net + +# X-Frame-Options: DENY, SAMEORIGIN, or ALLOW-FROM uri +X_FRAME_OPTIONS=DENY + +# HSTS max age in seconds (1 year = 31536000) +HSTS_MAX_AGE=31536000 + +# ============================================================ +# FEATURE FLAGS +# ============================================================ +ENABLE_CHARTS=True +ENABLE_SEARCH=True +ENABLE_EXPORT=True +ENABLE_DARK_MODE=True + +# ============================================================ +# UI CONFIGURATION +# ============================================================ +ITEMS_PER_PAGE=50 +MAX_ITEMS_PER_PAGE=200 + +# 
============================================================ +# EXPORT SETTINGS +# ============================================================ +EXPORT_FORMATS=json,csv +EXPORT_MAX_ITEMS=1000 + +# ============================================================ +# CDN URLS (for offline use, download and host locally) +# ============================================================ +BOOTSTRAP_CSS_CDN=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css +BOOTSTRAP_JS_CDN=https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js +FONTAWESOME_CDN=https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.1/css/all.min.css +CHARTJS_CDN=https://cdn.jsdelivr.net/npm/chart.js@4.4.1/dist/chart.umd.min.js + +# ============================================================ +# DISCORD BOT CONFIGURATION +# ============================================================ +ENABLE_DISCORD_BOT=False +DISCORD_BOT_TOKEN= +DISCORD_CHANNEL_ID= +DISCORD_CHECK_INTERVAL_MINUTES=60 +DISCORD_NOTIFY_CRITICAL=True +DISCORD_NOTIFY_HIGH=True +DISCORD_MIN_CVSS=7.0 +DISCORD_MIN_SEVERITY=HIGH + +# ============================================================ +# MONITORED VENDORS +# ============================================================ +# Vendors are configured in config.py VENDORS list, not via environment variables. +# Edit config.py to add/remove/modify vendors. + +# ============================================================ +# NOTES +# ============================================================ +# 1. Boolean values: True/False (case-sensitive) +# 2. Empty values will use defaults from config.py +# 3. Paths can be absolute or relative to project root +# 4. For production, always set DEBUG=False +# 5. Get NVD API key to avoid rate limits +# 6. Use strong CSP in production +# 7. 
Enable HTTPS in production (handled by reverse proxy) diff --git a/full_scan.py b/full_scan.py new file mode 100644 index 0000000..28272b2 --- /dev/null +++ b/full_scan.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python3 + +import logging +import sys +from datetime import datetime +from cve_handler import update_all_vendors, CVEHandler + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.FileHandler(f'logs/full_scan_{datetime.now().strftime("%Y%m%d_%H%M%S")}.log'), + logging.StreamHandler(sys.stdout) + ] +) + +logger = logging.getLogger(__name__) + + +def main(): + + print("=" * 70) + print("CVE MONITOR - FULL SCAN") + print("=" * 70) + print(f"Started at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") + print() + + try: + updated, failed = update_all_vendors(force=True) + + print() + print("=" * 70) + print("SCAN COMPLETED") + print("=" * 70) + print(f"✓ Successfully updated: {updated} vendors") + print(f"✗ Failed: {failed} vendors") + print(f"Finished at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") + print() + print("=" * 70) + print("DATABASE STATISTICS") + print("=" * 70) + + handler = CVEHandler() + summary = handler.get_all_vendors_summary() + + total_cves = sum(v['total'] for v in summary) + total_critical = sum(v['critical'] for v in summary) + total_high = sum(v['high'] for v in summary) + + print(f"Total CVEs in database: {total_cves}") + print(f" Critical: {total_critical}") + print(f" High: {total_high}") + print() + print("Per vendor breakdown:") + print("-" * 70) + print(f"{'Vendor':<25} {'Total':>8} {'Critical':>10} {'High':>10} {'Recent':>10}") + print("-" * 70) + + for v in sorted(summary, key=lambda x: x['total'], reverse=True): + print(f"{v['name']:<25} {v['total']:>8} {v['critical']:>10} {v['high']:>10} {v['recent']:>10}") + + print("=" * 70) + + return 0 if failed == 0 else 1 + + except KeyboardInterrupt: + print("\n\n Scan interrupted by user") + return 2 + except 
Exception as e: + logger.error(f"Fatal error during scan: {e}", exc_info=True) + return 3 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..16515b5 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,8 @@ +Flask +Werkzeug +requests +urllib3 +python-dateutil +gunicorn +discord.py +python-dotenv \ No newline at end of file diff --git a/scheduler.py b/scheduler.py new file mode 100644 index 0000000..2d85b0c --- /dev/null +++ b/scheduler.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 + +import time +import logging +from datetime import datetime +from cve_handler import update_all_vendors +import config + +logging.basicConfig( + level=config.LOG_LEVEL, + format=config.LOG_FORMAT +) +logger = logging.getLogger(__name__) + +def run_scheduler(): + logger.info("CVE Update Scheduler started") + logger.info(f"Update interval: {config.UPDATE_INTERVAL_HOURS} hours") + logger.info(f"Auto-update enabled: {config.ENABLE_AUTO_UPDATE}") + + time.sleep(120) + + while True: + if config.ENABLE_AUTO_UPDATE: + try: + logger.info(f"Starting scheduled update at {datetime.now()}") + updated, failed = update_all_vendors() + logger.info(f"Update completed: {updated} successful, {failed} failed") + except Exception as e: + logger.error(f"Error in scheduled update: {e}", exc_info=True) + else: + logger.debug("Auto-update is disabled, skipping") + + sleep_seconds = config.UPDATE_INTERVAL_HOURS * 3600 + next_update = datetime.now().timestamp() + sleep_seconds + logger.info(f"Next update scheduled at {datetime.fromtimestamp(next_update)}") + time.sleep(sleep_seconds) + + +if __name__ == '__main__': + run_scheduler() diff --git a/static/css/style.css b/static/css/style.css new file mode 100644 index 0000000..1ce1ee3 --- /dev/null +++ b/static/css/style.css @@ -0,0 +1,264 @@ +/* CVE Monitor - Custom Styles */ + +:root { + --sidebar-width: 250px; + --navbar-height: 56px; +} + +body { + font-size: 0.95rem; + min-height: 
100vh; +} + +/* Sidebar */ +.sidebar { + position: fixed; + top: var(--navbar-height); + bottom: 0; + left: 0; + z-index: 100; + padding: 0; + box-shadow: inset -1px 0 0 rgba(0, 0, 0, 0.1); + overflow-y: auto; +} + +.sidebar .nav-link { + font-weight: 500; + color: #333; + padding: 0.75rem 1rem; + border-left: 3px solid transparent; + transition: all 0.2s; +} + +.sidebar .nav-link:hover { + color: #0d6efd; + background-color: rgba(0, 0, 0, 0.05); +} + +.sidebar .nav-link.active { + color: #0d6efd; + border-left-color: #0d6efd; + background-color: rgba(13, 110, 253, 0.1); +} + +.sidebar .nav-link i { + margin-right: 0.5rem; + width: 20px; + text-align: center; +} + +.sidebar-heading { + font-size: 0.75rem; + font-weight: 600; + letter-spacing: 0.05em; +} + +/* Dark mode */ +[data-bs-theme="dark"] .sidebar { + box-shadow: inset -1px 0 0 rgba(255, 255, 255, 0.1); +} + +[data-bs-theme="dark"] .sidebar .nav-link { + color: #dee2e6; +} + +[data-bs-theme="dark"] .sidebar .nav-link:hover { + background-color: rgba(255, 255, 255, 0.1); +} + +[data-bs-theme="dark"] .sidebar .nav-link.active { + background-color: rgba(13, 110, 253, 0.2); +} + +/* Main content */ +main { + margin-left: var(--sidebar-width); + padding-top: 1rem; +} + +@media (max-width: 767.98px) { + .sidebar { + position: static; + height: auto; + } + + main { + margin-left: 0; + } +} + +/* Cards */ +.card { + border-radius: 0.5rem; + box-shadow: 0 0.125rem 0.25rem rgba(0, 0, 0, 0.075); + margin-bottom: 1rem; +} + +.card-body { + padding: 1.25rem; +} + +/* Stats cards */ +.card .fs-1 { + opacity: 0.2; +} + +.card:hover .fs-1 { + opacity: 0.3; + transition: opacity 0.3s; +} + +/* Table */ +.table { + font-size: 0.9rem; +} + +.table thead th { + border-bottom: 2px solid #dee2e6; + font-weight: 600; + text-transform: uppercase; + font-size: 0.75rem; + letter-spacing: 0.05em; + color: #6c757d; +} + +.table tbody tr { + cursor: pointer; + transition: background-color 0.2s; +} + +.table tbody tr:hover { + 
background-color: rgba(0, 0, 0, 0.02); +} + +[data-bs-theme="dark"] .table tbody tr:hover { + background-color: rgba(255, 255, 255, 0.05); +} + +/* Severity badges */ +.badge-critical { + background-color: #dc3545; + color: white; +} + +.badge-high { + background-color: #fd7e14; + color: white; +} + +.badge-medium { + background-color: #0dcaf0; + color: black; +} + +.badge-low { + background-color: #6c757d; + color: white; +} + +/* CVE ID column */ +.cve-id { + font-family: 'Courier New', monospace; + font-weight: 600; + color: #0d6efd; +} + +/* Loading spinner overlay */ +.loading-overlay { + position: absolute; + top: 0; + left: 0; + right: 0; + bottom: 0; + background: rgba(255, 255, 255, 0.8); + display: flex; + align-items: center; + justify-content: center; + z-index: 1000; +} + +[data-bs-theme="dark"] .loading-overlay { + background: rgba(0, 0, 0, 0.8); +} + +/* Pagination */ +.pagination { + margin-top: 1rem; +} + +/* Search results */ +#searchResults .list-group-item { + cursor: pointer; + transition: background-color 0.2s; +} + +#searchResults .list-group-item:hover { + background-color: rgba(0, 0, 0, 0.05); +} + +/* Charts */ +canvas { + max-height: 300px; +} + +/* Responsive */ +@media (max-width: 575.98px) { + .btn-toolbar { + flex-direction: column; + align-items: flex-start !important; + } + + .btn-group { + margin-bottom: 0.5rem; + } +} + +/* Scrollbar customization */ +::-webkit-scrollbar { + width: 8px; + height: 8px; +} + +::-webkit-scrollbar-track { + background: #f1f1f1; +} + +::-webkit-scrollbar-thumb { + background: #888; + border-radius: 4px; +} + +::-webkit-scrollbar-thumb:hover { + background: #555; +} + +[data-bs-theme="dark"] ::-webkit-scrollbar-track { + background: #2d2d2d; +} + +[data-bs-theme="dark"] ::-webkit-scrollbar-thumb { + background: #666; +} + +/* Animations */ +@keyframes fadeIn { + from { + opacity: 0; + transform: translateY(10px); + } + to { + opacity: 1; + transform: translateY(0); + } +} + +.fade-in { + animation: 
fadeIn 0.3s ease-in; +} + +/* Vendor badge counter */ +.vendor-badge { + font-size: 0.7rem; + padding: 0.25rem 0.5rem; + border-radius: 10px; +} diff --git a/static/js/app.js b/static/js/app.js new file mode 100644 index 0000000..9e34921 --- /dev/null +++ b/static/js/app.js @@ -0,0 +1,425 @@ +const state = { + currentVendor: null, + currentPage: 1, + itemsPerPage: 50, + filters: { severity: '', year: '' }, + charts: { trend: null, severity: null } +}; + +function updateThemeIcon(theme) { + const icon = document.querySelector('#darkModeToggle i'); + if (icon) icon.className = theme === 'dark' ? 'fas fa-sun' : 'fas fa-moon'; +} + +function toggleDarkMode() { + const currentTheme = document.documentElement.getAttribute('data-bs-theme'); + const newTheme = currentTheme === 'dark' ? 'light' : 'dark'; + document.documentElement.setAttribute('data-bs-theme', newTheme); + localStorage.setItem('theme', newTheme); + updateThemeIcon(newTheme); + if (state.currentVendor) { + fetch(`/api/stats/${state.currentVendor}`) + .then(r => r.json()) + .then(data => updateCharts(data.stats)) + .catch(console.error); + } +} + +function getSeverityIcon(severity) { + const icons = { + 'CRITICAL': '', + 'HIGH': '', + 'MEDIUM': '', + 'LOW': '' + }; + return icons[severity] || ''; +} + +function formatDate(dateString) { + if (!dateString) return 'N/A'; + return new Date(dateString).toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric' }); +} + +function formatMonth(monthString) { + const [year, month] = monthString.split('-'); + return new Date(year, month - 1).toLocaleDateString('en-US', { year: 'numeric', month: 'short' }); +} + +function escapeHtml(text) { + const div = document.createElement('div'); + div.textContent = text; + return div.innerHTML; +} + +function updateLastUpdate() { + const elem = document.getElementById('lastUpdate'); + if (elem) elem.innerHTML = `Last update: ${new Date().toLocaleTimeString()}`; +} + +function showLoading() { + const tbody = 
document.getElementById('cveTableBody'); + if (tbody) tbody.innerHTML = `

Loading...

`; +} + +function showError(message) { + console.error(message); + alert(message); +} + +document.addEventListener('DOMContentLoaded', () => { + initializeApp(); + setupEventListeners(); + loadVendors(); +}); + +function initializeApp() { + const savedTheme = localStorage.getItem('theme') || 'light'; + document.documentElement.setAttribute('data-bs-theme', savedTheme); + updateThemeIcon(savedTheme); +} + +function setupEventListeners() { + const handlers = { + 'darkModeToggle': toggleDarkMode, + 'refreshBtn': () => state.currentVendor && loadVendorData(state.currentVendor, true), + 'clearFilters': clearFiltersHandler, + 'exportJSON': () => exportData('json'), + 'exportCSV': () => exportData('csv'), + 'searchBtn': performSearch + }; + + Object.entries(handlers).forEach(([id, handler]) => { + const elem = document.getElementById(id); + if (elem) elem.addEventListener('click', handler); + }); + + const filterForm = document.getElementById('filterForm'); + if (filterForm) filterForm.addEventListener('submit', (e) => { e.preventDefault(); applyFilters(); }); + + const searchInput = document.getElementById('searchInput'); + if (searchInput) searchInput.addEventListener('keypress', (e) => { if (e.key === 'Enter') performSearch(); }); + + const prevPage = document.getElementById('prevPage'); + const nextPage = document.getElementById('nextPage'); + if (prevPage) prevPage.querySelector('a')?.addEventListener('click', (e) => { e.preventDefault(); changePage(-1); }); + if (nextPage) nextPage.querySelector('a')?.addEventListener('click', (e) => { e.preventDefault(); changePage(1); }); +} + +async function loadVendors() { + try { + const response = await fetch('/api/vendors'); + const data = await response.json(); + if (data.vendors) { + renderVendorList(data.vendors); + renderVendorDropdown(data.vendors); + if (data.vendors.length > 0) loadVendorData(data.vendors[0].code); + } + } catch (error) { + console.error('Error loading vendors:', error); + showError('Failed to load 
vendors list'); + } +} + +function renderVendorList(vendors) { + const vendorList = document.getElementById('vendorList'); + if (!vendorList) return; + + vendorList.innerHTML = vendors.map(vendor => ` + + `).join(''); + + vendorList.querySelectorAll('a[data-vendor]').forEach(link => { + link.addEventListener('click', (e) => { + e.preventDefault(); + loadVendorData(link.getAttribute('data-vendor')); + vendorList.querySelectorAll('.nav-link').forEach(l => l.classList.remove('active')); + link.classList.add('active'); + }); + }); +} + +function renderVendorDropdown(vendors) { + const dropdown = document.getElementById('vendorDropdown'); + if (!dropdown) return; + + dropdown.innerHTML = vendors.map(vendor => ` +
  • ${vendor.name}
  • + `).join(''); + + dropdown.querySelectorAll('a[data-vendor]').forEach(link => { + link.addEventListener('click', (e) => { + e.preventDefault(); + loadVendorData(link.getAttribute('data-vendor')); + }); + }); +} + +async function loadVendorData(vendorCode, forceRefresh = false) { + state.currentVendor = vendorCode; + state.currentPage = 1; + showLoading(); + + try { + const [cvesResponse, statsResponse] = await Promise.all([ + fetch(`/api/cve/${vendorCode}?limit=${state.itemsPerPage}&offset=0`), + fetch(`/api/stats/${vendorCode}`) + ]); + + const cvesData = await cvesResponse.json(); + const statsData = await statsResponse.json(); + + updateTitle(vendorCode); + updateStats(statsData.stats); + renderCVETable(cvesData.cves); + updateCharts(statsData.stats); + updateLastUpdate(); + } catch (error) { + console.error('Error loading vendor data:', error); + showError('Failed to load CVE data'); + } +} + +function updateTitle(vendorCode) { + const vendors = { + 'microsoft': 'Microsoft', 'apple': 'Apple', 'fortinet': 'Fortinet', + 'cisco': 'Cisco', 'adobe': 'Adobe', 'oracle': 'Oracle', + 'google': 'Google', 'linux': 'Linux Kernel', 'vmware': 'VMware', + 'paloalto': 'Palo Alto Networks', 'docker': 'Docker', 'kubernetes': 'Kubernetes' + }; + + const mainTitle = document.getElementById('mainTitle'); + if (mainTitle) mainTitle.innerHTML = `${vendors[vendorCode] || vendorCode} CVEs`; +} + +function updateStats(stats) { + const elements = { + statTotal: stats.total || 0, + statCritical: stats.severity?.CRITICAL || 0, + statHigh: stats.severity?.HIGH || 0, + statMonth: stats.this_month || 0 + }; + Object.entries(elements).forEach(([id, value]) => { + const elem = document.getElementById(id); + if (elem) elem.textContent = value; + }); +} + +function renderCVETable(cves) { + const tbody = document.getElementById('cveTableBody'); + if (!tbody) return; + + if (!cves || cves.length === 0) { + tbody.innerHTML = `

    No CVEs found

    `; + return; + } + + tbody.innerHTML = cves.map(cve => ` + + ${cve.cve_id} + ${getSeverityIcon(cve.severity)} ${cve.severity || 'UNKNOWN'} + ${cve.cvss_score ? cve.cvss_score.toFixed(1) : 'N/A'} +
    ${escapeHtml(cve.description || 'No description available')}
    + ${formatDate(cve.published_date)} + + `).join(''); + + if (cves.length >= state.itemsPerPage) { + const paginationNav = document.getElementById('paginationNav'); + if (paginationNav) paginationNav.classList.remove('d-none'); + } +} + +async function showCVEDetails(cveId) { + const modalElement = document.getElementById('cveModal'); + if (!modalElement) return; + + const modal = new bootstrap.Modal(modalElement); + const modalTitle = document.getElementById('cveModalTitle'); + const modalBody = document.getElementById('cveModalBody'); + + if (modalTitle) modalTitle.textContent = cveId; + if (modalBody) modalBody.innerHTML = '
    '; + + modal.show(); + + try { + const response = await fetch(`/api/cve/${state.currentVendor}`); + const data = await response.json(); + const cve = data.cves.find(c => c.cve_id === cveId); + + if (cve && modalBody) { + const references = cve.references ? JSON.parse(cve.references) : []; + const cweIds = cve.cwe_ids ? JSON.parse(cve.cwe_ids) : []; + + modalBody.innerHTML = ` +
    +
    +
    Severity
    + ${getSeverityIcon(cve.severity)} ${cve.severity || 'UNKNOWN'} +
    +
    +
    CVSS Score
    +

    ${cve.cvss_score ? cve.cvss_score.toFixed(1) : 'N/A'}

    + ${cve.cvss_vector ? `${cve.cvss_vector}` : ''} +
    +
    +
    Description

    ${escapeHtml(cve.description || 'N/A')}

    +
    +
    Published

    ${formatDate(cve.published_date)}

    +
    Modified

    ${formatDate(cve.last_modified)}

    +
    + ${cweIds.length > 0 ? `
    CWE IDs

    ${cweIds.map(cwe => `${cwe}`).join('')}

    ` : ''} + ${references.length > 0 ? `
    References
      ${references.slice(0, 5).map(ref => `
    • ${ref}
    • `).join('')}
    ` : ''} + `; + } + } catch (error) { + console.error('Error loading CVE details:', error); + if (modalBody) modalBody.innerHTML = '

    Error loading CVE details

    '; + } +} + +function updateCharts(stats) { + updateTrendChart(stats.monthly || {}); + updateSeverityChart(stats.severity || {}); +} + +function updateTrendChart(monthlyData) { + const ctx = document.getElementById('trendChart'); + if (!ctx) return; + if (state.charts.trend) state.charts.trend.destroy(); + + const months = Object.keys(monthlyData).sort(); + state.charts.trend = new Chart(ctx, { + type: 'bar', + data: { + labels: months.map(m => formatMonth(m)), + datasets: [{ + label: 'CVE Count', + data: months.map(m => monthlyData[m]), + backgroundColor: 'rgba(13, 110, 253, 0.5)', + borderColor: 'rgba(13, 110, 253, 1)', + borderWidth: 1 + }] + }, + options: { + responsive: true, + maintainAspectRatio: false, + plugins: { legend: { display: false } }, + scales: { y: { beginAtZero: true } } + } + }); +} + +function updateSeverityChart(severityData) { + const ctx = document.getElementById('severityChart'); + if (!ctx) return; + if (state.charts.severity) state.charts.severity.destroy(); + + state.charts.severity = new Chart(ctx, { + type: 'pie', + data: { + labels: ['Critical', 'High', 'Medium', 'Low'], + datasets: [{ + data: [severityData.CRITICAL || 0, severityData.HIGH || 0, severityData.MEDIUM || 0, severityData.LOW || 0], + backgroundColor: ['rgba(220, 53, 69, 0.8)', 'rgba(253, 126, 20, 0.8)', 'rgba(13, 202, 240, 0.8)', 'rgba(108, 117, 125, 0.8)'] + }] + }, + options: { + responsive: true, + maintainAspectRatio: false, + plugins: { legend: { position: 'bottom' } } + } + }); +} + +async function applyFilters() { + state.filters.severity = document.getElementById('filterSeverity')?.value || ''; + state.filters.year = document.getElementById('filterYear')?.value || ''; + state.currentPage = 1; + showLoading(); + + try { + let url = `/api/cve/${state.currentVendor}/filter?limit=${state.itemsPerPage}&offset=0`; + if (state.filters.severity) url += `&severity=${state.filters.severity}`; + if (state.filters.year) url += `&year=${state.filters.year}`; + + const 
response = await fetch(url); + const data = await response.json(); + renderCVETable(data.cves); + } catch (error) { + console.error('Error applying filters:', error); + showError('Failed to apply filters'); + } +} + /* Reset both filter dropdowns, clear state.filters, and reload the unfiltered data for the current vendor (no-op if no vendor is selected). */ +function clearFiltersHandler() { + const severityFilter = document.getElementById('filterSeverity'); + const yearFilter = document.getElementById('filterYear'); + if (severityFilter) severityFilter.value = ''; + if (yearFilter) yearFilter.value = ''; + state.filters = { severity: '', year: '' }; + if (state.currentVendor) loadVendorData(state.currentVendor); +} + /* Move forward/back by `delta` pages (clamped at 1) and fetch that page. NOTE(review): no upper bound check — the user can page past the last result; also ignores active severity/year filters (uses the unfiltered endpoint). */ +function changePage(delta) { + state.currentPage += delta; + if (state.currentPage < 1) state.currentPage = 1; + + const offset = (state.currentPage - 1) * state.itemsPerPage; + fetch(`/api/cve/${state.currentVendor}?limit=${state.itemsPerPage}&offset=${offset}`) + .then(r => r.json()) + .then(data => { + renderCVETable(data.cves); + const currentPage = document.getElementById('currentPage'); + if (currentPage) currentPage.textContent = state.currentPage; + }) + .catch(console.error); +} + /* Global CVE search via /api/search; requires >= 3 characters before querying. NOTE(review): the HTML fragments assigned to innerHTML below (and the results template literal) were stripped of their tags by the patch extraction — the original markup is not recoverable from this view. */ +async function performSearch() { + const searchInput = document.getElementById('searchInput'); + if (!searchInput) return; + + const query = searchInput.value.trim(); + const resultsDiv = document.getElementById('searchResults'); + if (!resultsDiv) return; + + if (query.length < 3) { + resultsDiv.innerHTML = '

    Please enter at least 3 characters

    '; + return; + } + + resultsDiv.innerHTML = '
    '; + + try { + const response = await fetch(`/api/search?q=${encodeURIComponent(query)}`); + const data = await response.json(); + + if (data.results && data.results.length > 0) { + resultsDiv.innerHTML = ``; + } else { + resultsDiv.innerHTML = '

    No results found

    '; + } + } catch (error) { + console.error('Error searching:', error); + resultsDiv.innerHTML = '

    Search failed

    '; + } +} + /* Open the server-side export endpoint for the current vendor in a new tab; `format` is presumably 'csv' or 'json' — confirm against the export buttons in index.html. */ +function exportData(format) { + if (!state.currentVendor) return; + window.open(`/api/export/${state.currentVendor}/${format}`, '_blank'); +} diff --git a/templates/404.html b/templates/404.html new file mode 100644 index 0000000..f1c7708 --- /dev/null +++ b/templates/404.html @@ -0,0 +1,27 @@ + + + + + + 404 - Page Not Found | CVE Monitor + + + + +
    +
    +
    + +

    404

    +

    Page Not Found

    +

    + The page you are looking for doesn't exist or has been moved. +

    + + Go Home + +
    +
    +
    + + diff --git a/templates/500.html b/templates/500.html new file mode 100644 index 0000000..9a5500d --- /dev/null +++ b/templates/500.html @@ -0,0 +1,27 @@ + + + + + + 500 - Server Error | CVE Monitor + + + + +
    +
    +
    + +

    500

    +

    Internal Server Error

    +

    + Something went wrong on our end. Please try again later. +

    + + Go Home + +
    +
    +
    + + diff --git a/templates/base.html b/templates/base.html new file mode 100644 index 0000000..201f8f9 --- /dev/null +++ b/templates/base.html @@ -0,0 +1,89 @@ + + + + + + {% block title %}{{ config.APP_NAME }}{% endblock %} + + + + + {% if config.ENABLE_CHARTS %} + + {% endif %} + + + {% block extra_head %}{% endblock %} + + + + + +
    +
    + + +
    + {% block content %}{% endblock %} +
    +
    +
    + + {% block modals %}{% endblock %} + + + + {% block extra_scripts %}{% endblock %} + + diff --git a/templates/index.html b/templates/index.html new file mode 100644 index 0000000..ab24d06 --- /dev/null +++ b/templates/index.html @@ -0,0 +1,240 @@ +{% extends "base.html" %} + +{% block title %}{{ config.APP_NAME }} - Dashboard{% endblock %} + +{% block content %} + +
    +

    + + CVE Dashboard +

    + +
    +
    + + {% if config.ENABLE_EXPORT %} + + + {% endif %} +
    + + {% if config.ENABLE_SEARCH %} + + {% endif %} +
    +
    + + +
    +
    +
    +
    +
    +
    +
    Total CVEs
    +

    -

    +
    +
    +
    +
    +
    +
    + +
    +
    +
    +
    +
    +
    Critical
    +

    -

    +
    +
    +
    +
    +
    +
    + +
    +
    +
    +
    +
    +
    High Risk
    +

    -

    +
    +
    +
    +
    +
    +
    + +
    +
    +
    +
    +
    +
    This Month
    +

    -

    +
    +
    +
    +
    +
    +
    +
    + + +
    +
    +
    +
    + + +
    +
    + + +
    +
    + + +
    +
    +
    +
    + + +
    +
    +
    Vulnerabilities
    +
    +
    +
    + + + + + + + + + + + + + + + +
    CVE IDSeverityCVSSDescriptionPublished
    +
    +

    Loading CVE data...

    +
    +
    + +
    +
    + +{% if config.ENABLE_CHARTS %} + +
    +
    +
    +
    +
    CVE Trend (Last 12 Months)
    +
    +
    + +
    +
    +
    +
    +
    +
    +
    Severity Distribution
    +
    +
    + +
    +
    +
    +
    +{% endif %} +{% endblock %} + +{% block modals %} + + + +{% if config.ENABLE_SEARCH %} + + +{% endif %} +{% endblock %}