first commit

This commit is contained in:
Mateusz Gruszczyński
2026-02-17 09:04:09 +01:00
commit c0afc1554d
32 changed files with 7217 additions and 0 deletions

84
.dockerignore Normal file
View File

@@ -0,0 +1,84 @@
# Git
.git
.gitignore
.gitattributes
# Python cache
__pycache__/
*.py[cod]
*$py.class
*.so
# Virtual environments
venv/
env/
ENV/
.venv/
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
# Testing
.pytest_cache/
.coverage
htmlcov/
.tox/
.nox/
tests/
test_*.py
*_test.py
# Documentation
README.md
docs/
*.md
LICENSE
# CI/CD
.github/
.gitlab-ci.yml
.travis.yml
Jenkinsfile
# Docker
Dockerfile*
docker-compose*.yml
.dockerignore
# Environment files (will be injected at runtime)
.env
.env.*
# Development files
*.log
logs/
# OS files
.DS_Store
Thumbs.db
desktop.ini
# Temporary files
*.tmp
*.bak
*~
# GeoIP database (downloaded at runtime or mounted)
geoip_db/*.mmdb
geoip_db/config.json
# Build artifacts
build/
dist/
*.egg-info/
# Node modules (if any frontend tools)
node_modules/
npm-debug.log
yarn-error.log
geoip_db

51
.env.example Normal file
View File

@@ -0,0 +1,51 @@
# Flask Configuration
FLASK_HOST=0.0.0.0
FLASK_PORT=5000
FLASK_DEBUG=False
SECRET_KEY=change-me-in-production-use-random-string
# Application Settings
APP_NAME=GeoIP Ban Generator
LOGO_URL=
LOGO_LINK=/
# Footer
FOOTER_TEXT=© 2026 GeoIP Ban Generator
FOOTER_LINK=
FOOTER_LINK_TEXT=Documentation
# MaxMind Database
MAXMIND_PRIMARY_URL=https://github.com/P3TERX/GeoLite.mmdb/releases/download/2026.02.07/GeoLite2-Country.mmdb
MAXMIND_FALLBACK_URL=https://git.io/GeoLite2-Country.mmdb
MAXMIND_UPDATE_INTERVAL_DAYS=7
MAXMIND_AUTO_UPDATE=True
# Cache Settings
CACHE_ENABLED=True
CACHE_TTL_SECONDS=3600
# (MAXMIND_UPDATE_INTERVAL_DAYS and MAXMIND_AUTO_UPDATE are defined once in
# the "MaxMind Database" section above — duplicate entries removed)
# Background Scheduler Settings
SCHEDULER_ENABLED=true
SCAN_INTERVAL=7d
SCAN_TIME=02:00
SCAN_ON_STARTUP=true
CACHE_MAX_AGE_HOURS=168
# Parallel scanning (0 = auto-detect worker count).
# Keep the comment on its own line: many dotenv parsers would otherwise
# read the value as the literal string "8 # 0=auto".
PARALLEL_WORKERS=8
# Redis
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_DB=0
REDIS_PASSWORD=
REDIS_ENABLED=true
REDIS_CACHE_TTL=86400
# Precache Daemon Settings
PRECACHE_INTERVAL_HOURS=168
PRECACHE_CHECK_INTERVAL=3600

121
.gitignore vendored Normal file
View File

@@ -0,0 +1,121 @@
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# Virtual Environment
venv/
env/
ENV/
env.bak/
venv.bak/
.venv/
# Flask
instance/
.webassets-cache
*.log
# Environment variables
.env
.env.local
.env.*.local
# GeoIP Database
geoip_db/*.mmdb
geoip_db/config.json
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store
# Testing
.pytest_cache/
.coverage
htmlcov/
.tox/
.nox/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# Celery
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# Temporary files
*.tmp
*.bak
*.swp
*~
# Logs
logs/
*.log
# OS files
.DS_Store
Thumbs.db
desktop.ini
# Cache
.cache/
*.cache
# Local config overrides
config.local.py
local_settings.py
geoip_db/*

38
Dockerfile Normal file
View File

@@ -0,0 +1,38 @@
# syntax=docker/dockerfile:1
FROM python:3.14-alpine

WORKDIR /app

# OS deps: compiler toolchain for any C extensions in requirements.txt,
# curl for the HEALTHCHECK probe. Sorted for diffability.
RUN apk add --no-cache \
        curl \
        gcc \
        linux-headers \
        musl-dev

# Install Python deps first so this layer stays cached until
# requirements.txt changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Application source (keep .dockerignore tight to limit cache busts).
COPY . .

# Create runtime dirs and a dedicated non-root user; /app/geoip_db must
# remain writable because the .mmdb database is downloaded at runtime.
RUN mkdir -p /app/geoip_db /app/static /app/templates \
    && addgroup -S app \
    && adduser -S -G app app \
    && chown -R app:app /app

ENV FLASK_HOST=0.0.0.0 \
    FLASK_PORT=5000 \
    FLASK_DEBUG=False \
    PYTHONUNBUFFERED=1

USER app

# Documentation only; publish with `docker run -p`.
EXPOSE ${FLASK_PORT:-5000}

HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
    CMD curl -f http://localhost:${FLASK_PORT:-5000}/ || exit 1

# Exec (JSON) form so the process is PID 1 and receives SIGTERM from
# `docker stop`; the sh -c + exec wrapper exists only to expand ${FLASK_PORT}.
CMD ["/bin/sh", "-c", "exec gunicorn --bind 0.0.0.0:${FLASK_PORT:-5000} \
    --workers 1 \
    --threads 8 \
    --timeout 900 \
    --graceful-timeout 900 \
    --keep-alive 300 \
    --access-logfile - \
    --error-logfile - \
    --log-level info \
    app:app"]

31
README.md Normal file
View File

@@ -0,0 +1,31 @@
# Pierwsze uruchomienie - pobierze bazę automatycznie
python3 generate_ban.py --country CN --app nginx --output china.conf
# Wiele krajów po przecinku
python3 generate_ban.py --country CN,RU,KP,IR --app haproxy --output multi_country.conf
# Ręczna aktualizacja bazy
python3 generate_ban.py --update-db
# Zmiana URL bazy danych
python3 generate_ban.py --set-config database_url=https://twoj-serwer.pl/GeoIP.zip
# Zmiana interwału aktualizacji (dni)
python3 generate_ban.py --set-config update_interval_days=14
# Wyłączenie auto-update
python3 generate_ban.py --set-config auto_update=false
# Podgląd konfiguracji
python3 generate_ban.py --show-config
# Użycie niestandardowej lokalizacji config
python3 generate_ban.py --config /etc/geoip/config.json --country US --app nginx
# Bez agregacji sieci (wszystkie oryginalne zakresy)
python3 generate_ban.py --country CN --app nginx --no-aggregate
# Apache z wieloma krajami
python3 generate_ban.py --country CN,RU,BY,KP --app apache --output apache_geoblock.conf

705
api.py Normal file
View File

@@ -0,0 +1,705 @@
"""
API endpoints for GeoIP Ban Generator
"""
from flask import Blueprint, request, jsonify, Response
from geoip_handler import GeoIPHandler, ConfigGenerator, generate_metadata
from datetime import datetime
import json
import config
import sqlite3
api_blueprint = Blueprint('api', __name__)
handler = GeoIPHandler()

# Optional Redis layer; endpoints fall back to SQLite/MaxMind when it is
# disabled or fails to come up.
redis_cache = None
if config.REDIS_ENABLED:
    try:
        from redis_cache import RedisCache
        redis_cache = RedisCache()
        print("[REDIS] Cache enabled", flush=True)
    except Exception as e:
        print(f"[REDIS] Failed to initialize: {e}", flush=True)
        redis_cache = None
# Shared progress snapshot polled by the /api/progress endpoint.
progress_state = {
    'active': False,
    'message': '',
    'progress': 0,
    'total': 0,
}


def update_progress(message, progress=0, total=0):
    """Record an in-flight operation's status for the progress endpoint."""
    progress_state.update(active=True, message=message,
                          progress=progress, total=total)


def clear_progress():
    """Reset the shared progress snapshot to its idle state."""
    progress_state.update(active=False, message='', progress=0, total=0)
def get_country_networks_cached(country_code: str, use_cache: bool = True):
    """Resolve the network list for one country, cheapest source first.

    Lookup order: Redis (if enabled and ``use_cache``) -> local SQLite cache
    -> live MaxMind fetch.  Results from the slower tiers are written back to
    Redis with a 24h TTL so subsequent requests are served from memory.
    The original had this write-back duplicated verbatim in two branches;
    it is factored into a single helper here.

    Args:
        country_code: ISO 3166-1 alpha-2 code (any case).
        use_cache: when False, skip the Redis read (SQLite/MaxMind still run).

    Returns:
        Tuple ``(networks, source)`` where source is 'redis', 'sqlite' or
        'maxmind'; networks may be None/empty when nothing was found.
    """
    country_code = country_code.upper()
    redis_key = f"geoban:country:{country_code}"

    def _save_to_redis(networks):
        # Best-effort write-back; a Redis outage must not break generation.
        if not redis_cache:
            return
        try:
            redis_cache.redis_client.setex(redis_key, 86400, json.dumps(networks))
        except Exception as e:
            print(f"[{country_code}] Redis save error: {e}", flush=True)

    if use_cache and redis_cache:
        try:
            cached_data = redis_cache.redis_client.get(redis_key)
            if cached_data:
                # Client may hand back bytes or str depending on its
                # decode_responses setting — accept both.
                if isinstance(cached_data, bytes):
                    cached_data = cached_data.decode('utf-8')
                return json.loads(cached_data), 'redis'
        except Exception as e:
            print(f"[{country_code}] Redis read error: {e}", flush=True)

    networks = handler._get_cached_networks(country_code)
    if networks is not None:
        _save_to_redis(networks)
        return networks, 'sqlite'

    networks = handler.fetch_country_networks(country_code)
    if networks:
        _save_to_redis(networks)
    return networks, 'maxmind'
class ProgressTracker:
    """Maps one country's sub-steps onto its slice of the overall progress bar."""

    def __init__(self, country, country_idx, total_countries, base_progress, next_progress):
        self.country = country
        self.country_idx = country_idx
        self.total_countries = total_countries
        self.base_progress = base_progress
        self.next_progress = next_progress
        self.current_progress = base_progress

    def callback(self, message):
        """Advance roughly a tenth of this country's slice and publish message."""
        span = self.next_progress - self.base_progress
        step = max(1, span // 10)
        # Never reach next_progress — that value belongs to the next country.
        self.current_progress = min(self.current_progress + step, self.next_progress - 1)
        label = f'[{self.country_idx}/{self.total_countries}] {self.country}: {message}'
        update_progress(label, self.current_progress, 100)
@api_blueprint.route('/api/progress', methods=['GET'])
def get_progress():
    """Expose the shared progress snapshot for UI polling."""
    return jsonify(progress_state)
@api_blueprint.route('/api/database/status', methods=['GET'])
def database_status():
    """Report MaxMind database presence, freshness and recorded size."""
    cfg = handler.load_config()
    db_present = handler.mmdb_file.exists()
    return jsonify({
        'success': True,
        'exists': db_present,
        'needs_update': handler.needs_update(),
        'last_update': cfg.get('last_update'),
        'file_size': cfg.get('file_size', 0) if db_present else 0,
        'auto_update': config.MAXMIND_AUTO_UPDATE,
    })
@api_blueprint.route('/api/database/update', methods=['POST'])
def update_database():
    """Force a MaxMind database download and relay the handler's result."""
    return jsonify(handler.download_database())
@api_blueprint.route('/api/countries', methods=['GET'])
def get_countries():
    """List the curated set of selectable countries."""
    return jsonify({'success': True, 'countries': config.COMMON_COUNTRIES})
@api_blueprint.route('/api/cache/status', methods=['GET'])
def cache_status():
    """Summarise Redis cache health plus key counts and memory usage.

    Scans three key namespaces: country payloads (geoban:country:*) and both
    the legacy (geoip:config:*) and current (geoban:config:*) config formats.
    The original repeated the same SCAN/memory_usage loop three times; it is
    factored into a single helper here, and the bare ``except:`` clauses are
    narrowed to ``except Exception``.
    """
    if not redis_cache:
        return jsonify({
            'success': False,
            'enabled': False,
            'message': 'Redis cache is not enabled'
        })

    def _scan_pattern(pattern):
        """Return (key_count, total_bytes) for all keys matching pattern."""
        count = 0
        size_bytes = 0
        cursor = 0
        while True:
            cursor, keys = redis_cache.redis_client.scan(cursor, match=pattern, count=1000)
            count += len(keys)
            for key in keys:
                try:
                    size = redis_cache.redis_client.memory_usage(key)
                    if size:
                        size_bytes += size
                except Exception:
                    # MEMORY USAGE can be unsupported or racy; the total
                    # stays a best-effort approximation.
                    pass
            if cursor == 0:
                return count, size_bytes

    try:
        health = redis_cache.health_check()
        country_keys_count = 0
        config_keys_count = 0
        total_size_bytes = 0
        try:
            country_keys_count, size = _scan_pattern("geoban:country:*")
            total_size_bytes += size
            # Legacy (geoip:...) and current (geoban:...) config formats.
            for pattern in ("geoip:config:*", "geoban:config:*"):
                cnt, size = _scan_pattern(pattern)
                config_keys_count += cnt
                total_size_bytes += size
        except Exception as e:
            print(f"[REDIS] Error counting keys: {e}", flush=True)
            import traceback
            traceback.print_exc()
        return jsonify({
            'success': True,
            'enabled': True,
            'health': health,
            'stats': {
                'country_keys': country_keys_count,
                'config_keys': config_keys_count,
                'total_keys': country_keys_count + config_keys_count,
                'total_size_mb': round(total_size_bytes / 1024 / 1024, 2),
                'memory_used_mb': health.get('memory_used_mb', 0),
                'total_keys_in_db': health.get('keys', 0)
            }
        })
    except Exception as e:
        import traceback
        traceback.print_exc()
        return jsonify({
            'success': False,
            'enabled': True,
            'error': str(e)
        }), 500
@api_blueprint.route('/api/cache/flush', methods=['POST'])
def cache_flush():
    """Drop every cached entry from Redis."""
    if not redis_cache:
        return jsonify({'success': False, 'error': 'Redis not enabled'}), 503
    try:
        flushed = redis_cache.flush_all()
        message = 'Cache flushed successfully' if flushed else 'Failed to flush cache'
        return jsonify({'success': flushed, 'message': message})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
@api_blueprint.route('/api/cache/invalidate/<country_code>', methods=['POST'])
def cache_invalidate(country_code):
    """Evict one country's cached networks plus any configs referencing it."""
    if not redis_cache:
        return jsonify({'success': False, 'error': 'Redis not enabled'}), 503
    try:
        country_code = country_code.upper()
        # Direct per-country payload.
        deleted = redis_cache.redis_client.delete(f"geoban:country:{country_code}")
        # Generated-config entries whose key embeds this country code.
        pattern = f"geoban:config:*{country_code}*"
        config_deleted = 0
        cursor = 0
        while True:
            cursor, keys = redis_cache.redis_client.scan(cursor, match=pattern, count=100)
            if keys:
                config_deleted += redis_cache.redis_client.delete(*keys)
            if cursor == 0:
                break
        return jsonify({
            'success': True,
            'deleted': deleted + config_deleted,
            'country': country_code,
            'details': {
                'country_cache': deleted,
                'config_caches': config_deleted
            }
        })
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
@api_blueprint.route('/api/generate/preview', methods=['POST'])
def generate_preview():
    """Render a blocklist config for on-screen preview (JSON, not a download).

    Request JSON: countries (list), app_type, app_variant, aggregate,
    use_cache.  Serves a fully rendered config from Redis when available;
    otherwise loads per-country networks (Redis -> SQLite -> MaxMind),
    runs the requested generator and caches the result.
    """
    try:
        clear_progress()
        data = request.get_json()
        countries = data.get('countries', [])
        app_type = data.get('app_type', 'nginx')
        app_variant = data.get('app_variant', 'geo')
        aggregate = data.get('aggregate', True)
        use_cache = data.get('use_cache', True)
        if not countries:
            return jsonify({'success': False, 'error': 'No countries selected'}), 400

        # Fast path: a fully rendered config cached in Redis.
        if use_cache and redis_cache:
            cached = redis_cache.get_cached_config(countries, f"{app_type}_{app_variant}", aggregate)
            if cached:
                return jsonify({
                    'success': True,
                    'config': cached['config'],
                    'stats': cached['stats'],
                    'from_cache': True,
                    'cache_type': 'redis-full',
                    'generated_at': cached['generated_at']
                })

        if handler.needs_update():
            handler.check_and_update()

        update_progress(f'Loading data for {len(countries)} countries...', 0, 100)
        country_networks = {}
        cache_sources = {}
        total_countries = len(countries)
        for idx, country in enumerate(countries, 1):
            base_progress = int((idx - 1) / total_countries * 80)
            update_progress(f'[{idx}/{total_countries}] Loading {country}...', base_progress, 100)
            networks, source = get_country_networks_cached(country, use_cache=use_cache)
            if networks:
                country_networks[country] = networks
                cache_sources[country] = source
                update_progress(
                    f'[{idx}/{total_countries}] {country}: {len(networks):,} networks ({source})',
                    base_progress + 10,
                    100
                )
            else:
                update_progress(f'[{idx}/{total_countries}] {country}: No networks found', base_progress + 10, 100)

        if not country_networks:
            clear_progress()
            return jsonify({'success': False, 'error': 'No networks found'}), 404

        update_progress('Generating configuration...', 90, 100)
        if app_type == 'raw-cidr':
            # Plain-list formats bypass the generator dispatch table.
            if app_variant == 'csv':
                config_text = ConfigGenerator.generate_csv(country_networks, aggregate=aggregate, redis_ips=None)
            else:
                config_text = ConfigGenerator.generate_raw_cidr(country_networks, aggregate=aggregate, redis_ips=None)
        else:
            generators = {
                'nginx_geo': ConfigGenerator.generate_nginx_geo,
                'nginx_map': ConfigGenerator.generate_nginx_map,
                'nginx_deny': ConfigGenerator.generate_nginx_deny,
                'apache_22': ConfigGenerator.generate_apache_22,
                'apache_24': ConfigGenerator.generate_apache_24,
                'haproxy_acl': ConfigGenerator.generate_haproxy_acl,
                'haproxy_lua': ConfigGenerator.generate_haproxy_lua,
            }
            generator_key = f"{app_type}_{app_variant}"
            generator = generators.get(generator_key)
            if not generator:
                clear_progress()
                return jsonify({'success': False, 'error': f'Invalid configuration type: {generator_key}'}), 400
            config_text = generator(country_networks, aggregate=aggregate, redis_ips=None)

        stats = {
            'countries': len(country_networks),
            'total_networks': sum(len(nets) for nets in country_networks.values()),
            'per_country': {cc: len(nets) for cc, nets in country_networks.items()}
        }
        if redis_cache:
            update_progress('Saving to Redis cache for future use...', 95, 100)
            redis_cache.save_config(countries, f"{app_type}_{app_variant}", aggregate, config_text, stats)

        update_progress('Complete!', 100, 100)
        clear_progress()
        source_summary = {
            src: sum(1 for s in cache_sources.values() if s == src)
            for src in ('redis', 'sqlite', 'maxmind')
        }
        return jsonify({
            'success': True,
            'config': config_text,
            'stats': stats,
            'from_cache': False,
            'cache_type': 'hybrid',
            'cache_sources': cache_sources,
            'source_summary': source_summary,
            'generated_at': datetime.now().isoformat()
        })
    except Exception as e:
        clear_progress()
        import traceback
        traceback.print_exc()
        return jsonify({'success': False, 'error': str(e)}), 500
@api_blueprint.route('/api/generate/raw', methods=['POST'])
def generate_raw_cidr():
    """Stream a raw CIDR blocklist (txt or csv) as a file download.

    Request JSON: countries (list), aggregate (bool), app_variant
    ('txt' | 'csv'), use_cache (bool).

    Fix: Content-Disposition now interpolates the computed ``filename``;
    it previously sent a hard-coded placeholder while the variable went
    unused.
    """
    try:
        clear_progress()
        data = request.get_json()
        countries = data.get('countries', [])
        aggregate = data.get('aggregate', True)
        format_type = data.get('app_variant', 'txt')
        use_cache = data.get('use_cache', True)
        if not countries:
            return jsonify({'success': False, 'error': 'No countries selected'}), 400

        # Fast path: fully rendered file cached in Redis.
        if use_cache and redis_cache:
            cached = redis_cache.get_cached_config(countries, f"raw-cidr_{format_type}", aggregate)
            if cached:
                filename = f"cidr_blocklist_{'_'.join(sorted(countries))}.{format_type}"
                mimetype = 'text/csv' if format_type == 'csv' else 'text/plain'
                return Response(
                    cached['config'],
                    mimetype=mimetype,
                    headers={
                        'Content-Disposition': f'attachment; filename="{filename}"',
                        'X-From-Cache': 'true',
                        'X-Cache-Type': 'redis-full',
                        'X-Generated-At': cached['generated_at'],
                        'Cache-Control': 'no-cache, no-store, must-revalidate',
                        'Pragma': 'no-cache',
                        'Expires': '0'
                    }
                )

        if handler.needs_update():
            handler.check_and_update()

        update_progress(f'Loading data for {len(countries)} countries...', 0, 100)
        country_networks = {}
        cache_sources = {}
        total_countries = len(countries)
        for idx, country in enumerate(countries, 1):
            base_progress = int((idx - 1) / total_countries * 80)
            update_progress(f'[{idx}/{total_countries}] Loading {country}...', base_progress, 100)
            networks, source = get_country_networks_cached(country, use_cache=use_cache)
            if networks:
                country_networks[country] = networks
                cache_sources[country] = source
                next_progress = int(idx / total_countries * 80)
                update_progress(
                    f'[{idx}/{total_countries}] {country}: {len(networks):,} networks ({source})',
                    next_progress,
                    100
                )

        if not country_networks:
            clear_progress()
            return jsonify({'success': False, 'error': 'No networks found'}), 404

        update_progress('Generating file...', 85, 100)
        if format_type == 'txt':
            config_text = ConfigGenerator.generate_raw_cidr(country_networks, aggregate=aggregate, redis_ips=None)
            filename = f"cidr_blocklist_{'_'.join(sorted(countries))}.txt"
            mimetype = 'text/plain'
        else:
            config_text = ConfigGenerator.generate_csv(country_networks, aggregate=aggregate, redis_ips=None)
            filename = f"cidr_blocklist_{'_'.join(sorted(countries))}.csv"
            mimetype = 'text/csv'

        stats = {
            'countries': len(country_networks),
            'total_networks': sum(len(nets) for nets in country_networks.values()),
            'per_country': {cc: len(nets) for cc, nets in country_networks.items()}
        }
        if redis_cache:
            update_progress('Saving to Redis cache...', 95, 100)
            redis_cache.save_config(countries, f"raw-cidr_{format_type}", aggregate, config_text, stats)

        update_progress('Complete!', 100, 100)
        clear_progress()

        # Label the response with the dominant per-country data source.
        cache_type = 'hybrid'
        if cache_sources:
            cache_type = max(set(cache_sources.values()), key=list(cache_sources.values()).count)
        return Response(
            config_text,
            mimetype=mimetype,
            headers={
                'Content-Disposition': f'attachment; filename="{filename}"',
                'X-From-Cache': 'false',
                'X-Cache-Type': cache_type,
                'X-Generated-At': datetime.now().isoformat(),
                'Cache-Control': 'no-cache, no-store, must-revalidate',
                'Pragma': 'no-cache',
                'Expires': '0'
            }
        )
    except Exception as e:
        clear_progress()
        return jsonify({'success': False, 'error': str(e)}), 500
@api_blueprint.route('/api/generate', methods=['POST'])
def generate_config():
    """Generate a server-specific blocklist config as a file download.

    Request JSON: countries (list), app_type, app_variant, aggregate,
    use_cache.  Dispatches to the matching ConfigGenerator method.

    Fix: Content-Disposition now interpolates the computed ``filename``;
    it previously sent a hard-coded placeholder while the variable went
    unused.
    """
    try:
        clear_progress()
        data = request.get_json()
        countries = data.get('countries', [])
        app_type = data.get('app_type', 'nginx')
        app_variant = data.get('app_variant', 'geo')
        aggregate = data.get('aggregate', True)
        use_cache = data.get('use_cache', True)
        if not countries:
            return jsonify({'success': False, 'error': 'No countries selected'}), 400

        # Fast path: a fully rendered config cached in Redis.
        if use_cache and redis_cache:
            cached = redis_cache.get_cached_config(countries, f"{app_type}_{app_variant}", aggregate)
            if cached:
                filename = f"geoblock_{app_type}_{app_variant}.conf"
                if app_variant == 'lua':
                    filename = f"geoblock_{app_type}.lua"
                return Response(
                    cached['config'],
                    mimetype='text/plain',
                    headers={
                        'Content-Disposition': f'attachment; filename="{filename}"',
                        'X-From-Cache': 'true',
                        'X-Cache-Type': 'redis-full',
                        'X-Generated-At': cached['generated_at'],
                        'Cache-Control': 'no-cache, no-store, must-revalidate',
                        'Pragma': 'no-cache',
                        'Expires': '0'
                    }
                )

        if handler.needs_update():
            handler.check_and_update()

        update_progress(f'Loading data for {len(countries)} countries...', 0, 100)
        country_networks = {}
        cache_sources = {}
        total_countries = len(countries)
        for idx, country in enumerate(countries, 1):
            base_progress = int((idx - 1) / total_countries * 80)
            update_progress(f'[{idx}/{total_countries}] Loading {country}...', base_progress, 100)
            networks, source = get_country_networks_cached(country, use_cache=use_cache)
            if networks:
                country_networks[country] = networks
                cache_sources[country] = source
                next_progress = int(idx / total_countries * 80)
                update_progress(
                    f'[{idx}/{total_countries}] {country}: {len(networks):,} networks ({source})',
                    next_progress,
                    100
                )

        if not country_networks:
            clear_progress()
            return jsonify({'success': False, 'error': 'No networks found'}), 404

        update_progress('Generating configuration...', 85, 100)
        generators = {
            'nginx_geo': ConfigGenerator.generate_nginx_geo,
            'nginx_map': ConfigGenerator.generate_nginx_map,
            'nginx_deny': ConfigGenerator.generate_nginx_deny,
            'apache_22': ConfigGenerator.generate_apache_22,
            'apache_24': ConfigGenerator.generate_apache_24,
            'haproxy_acl': ConfigGenerator.generate_haproxy_acl,
            'haproxy_lua': ConfigGenerator.generate_haproxy_lua,
        }
        generator_key = f"{app_type}_{app_variant}"
        generator = generators.get(generator_key)
        if not generator:
            clear_progress()
            return jsonify({'success': False, 'error': 'Invalid configuration type'}), 400
        config_text = generator(country_networks, aggregate=aggregate, redis_ips=None)

        stats = {
            'countries': len(country_networks),
            'total_networks': sum(len(nets) for nets in country_networks.values()),
            'per_country': {cc: len(nets) for cc, nets in country_networks.items()}
        }
        if redis_cache:
            update_progress('Saving to Redis cache...', 95, 100)
            redis_cache.save_config(countries, f"{app_type}_{app_variant}", aggregate, config_text, stats)

        filename = f"geoblock_{app_type}_{app_variant}.conf"
        if app_variant == 'lua':
            filename = f"geoblock_{app_type}.lua"

        update_progress('Complete!', 100, 100)
        clear_progress()

        # Label the response with the dominant per-country data source.
        cache_type = 'hybrid'
        if cache_sources:
            cache_type = max(set(cache_sources.values()), key=list(cache_sources.values()).count)
        return Response(
            config_text,
            mimetype='text/plain',
            headers={
                'Content-Disposition': f'attachment; filename="{filename}"',
                'X-From-Cache': 'false',
                'X-Cache-Type': cache_type,
                'X-Generated-At': datetime.now().isoformat(),
                'Cache-Control': 'no-cache, no-store, must-revalidate',
                'Pragma': 'no-cache',
                'Expires': '0'
            }
        )
    except Exception as e:
        clear_progress()
        return jsonify({'success': False, 'error': str(e)}), 500
@api_blueprint.route('/api/database/sqlite/status', methods=['GET'])
def sqlite_status():
    """Get SQLite cache database statistics.

    Fix: the connection is now closed in a ``finally`` block — the original
    leaked it whenever a query raised (e.g. missing cache_metadata table).
    """
    db_path = config.GEOIP_DB_DIR / 'networks_cache.db'
    if not db_path.exists():
        return jsonify({
            'success': False,
            'exists': False,
            'message': 'SQLite cache database not found'
        })
    conn = None
    try:
        file_size = db_path.stat().st_size
        modified_time = datetime.fromtimestamp(db_path.stat().st_mtime)
        conn = sqlite3.connect(str(db_path), timeout=10.0)
        cursor = conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM cache_metadata")
        total_countries = cursor.fetchone()[0]
        cursor.execute("SELECT SUM(network_count) FROM cache_metadata")
        total_networks = cursor.fetchone()[0] or 0
        cursor.execute("SELECT MIN(last_scan), MAX(last_scan) FROM cache_metadata")
        oldest, newest = cursor.fetchone()
        cursor.execute("""
            SELECT country_code, network_count
            FROM cache_metadata
            ORDER BY network_count DESC
            LIMIT 5
        """)
        top_countries = [{'code': row[0], 'networks': row[1]} for row in cursor.fetchall()]
        return jsonify({
            'success': True,
            'exists': True,
            'file_size': file_size,
            'file_size_mb': round(file_size / 1024 / 1024, 2),
            'modified': modified_time.isoformat(),
            'total_countries': total_countries,
            'total_networks': total_networks,
            'oldest_scan': oldest,
            'newest_scan': newest,
            'top_countries': top_countries
        })
    except Exception as e:
        import traceback
        traceback.print_exc()
        return jsonify({
            'success': False,
            'exists': True,
            'error': str(e)
        }), 500
    finally:
        # Release the handle on both success and error paths.
        if conn is not None:
            conn.close()
api = api_blueprint

242
app.py Normal file
View File

@@ -0,0 +1,242 @@
"""
GeoIP Ban Generator - Web Application
"""
from flask import Flask, render_template, request, Response, jsonify
import hashlib
import os
import sqlite3
from pathlib import Path
from functools import wraps
from datetime import datetime
import config
from api import api
from geoip_handler import GeoIPHandler
# Flask application wired to the project's static/template directories.
app = Flask(
    __name__,
    static_folder=str(config.STATIC_DIR),
    template_folder=str(config.TEMPLATE_DIR),
)
app.config['SECRET_KEY'] = config.SECRET_KEY
app.register_blueprint(api)

handler = GeoIPHandler()

# Optional Redis layer; the app runs (degraded) without it.
redis_cache = None
if config.REDIS_ENABLED:
    try:
        from redis_cache import RedisCache
        redis_cache = RedisCache()
        redis_health = redis_cache.health_check()
        if redis_health['connected']:
            print(f"[REDIS] Connected successfully - {redis_health['memory_used_mb']}MB used", flush=True)
        else:
            print(f"[REDIS] Connection failed: {redis_health.get('error')}", flush=True)
    except Exception as e:
        print(f"[REDIS] Failed to initialize: {e}", flush=True)
        redis_cache = None
def get_file_hash(filepath: Path) -> str:
    """Return a short MD5 digest of a file for cache-busting asset URLs.

    Falls back to the static marker 'v1' when the file is missing or
    unreadable, so template rendering never fails on a bad asset path.
    The original's bare ``except:`` is narrowed to OSError — it should not
    swallow KeyboardInterrupt or programming errors.
    """
    try:
        if filepath.exists():
            with open(filepath, 'rb') as f:
                return hashlib.md5(f.read()).hexdigest()[:8]
    except OSError:
        # Permission/IO errors: fall through to the default marker.
        pass
    return 'v1'
def get_sqlite_status():
    """Get SQLite cache database status.

    Returns a dict with 'exists' and 'status' ('missing' | 'ok' | 'error'),
    plus size and row statistics when the database is readable.

    Fix: the connection is now closed in a ``finally`` block — the original
    leaked it whenever a query raised.
    """
    db_path = config.GEOIP_DB_DIR / 'networks_cache.db'
    if not db_path.exists():
        return {
            'exists': False,
            'status': 'missing'
        }
    conn = None
    try:
        file_size = db_path.stat().st_size
        modified_time = datetime.fromtimestamp(db_path.stat().st_mtime)
        conn = sqlite3.connect(str(db_path), timeout=5.0)
        cursor = conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM cache_metadata")
        total_countries = cursor.fetchone()[0]
        cursor.execute("SELECT SUM(network_count) FROM cache_metadata")
        total_networks = cursor.fetchone()[0] or 0
        return {
            'exists': True,
            'status': 'ok',
            'file_size_mb': round(file_size / 1024 / 1024, 2),
            'total_countries': total_countries,
            'total_networks': total_networks,
            'modified': modified_time.isoformat()
        }
    except Exception as e:
        return {
            'exists': True,
            'status': 'error',
            'error': str(e)
        }
    finally:
        # Release the handle on both success and error paths.
        if conn is not None:
            conn.close()
@app.context_processor
def inject_globals():
    """Expose branding, asset cache-busting hashes and Redis state to all templates."""
    redis_connected = redis_cache.health_check()['connected'] if redis_cache else False
    return {
        'app_name': config.APP_NAME,
        'app_version': config.APP_VERSION,
        'logo_url': config.LOGO_URL,
        'logo_link': config.LOGO_LINK,
        'footer_text': config.FOOTER_TEXT,
        'footer_link': config.FOOTER_LINK,
        'footer_link_text': config.FOOTER_LINK_TEXT,
        # Short content hashes appended to static URLs for cache busting.
        'css_hash': get_file_hash(config.STATIC_DIR / 'css' / 'style.css'),
        'js_hash': get_file_hash(config.STATIC_DIR / 'js' / 'app.js'),
        'get_country_flag': config.get_country_flag,
        'redis_enabled': config.REDIS_ENABLED,
        'redis_connected': redis_connected,
    }
@app.route('/')
def index():
    """Main page; opportunistically refreshes the MaxMind DB when enabled."""
    if config.MAXMIND_AUTO_UPDATE:
        handler.check_and_update()
    return render_template('index.html', countries=config.COMMON_COUNTRIES)
@app.route('/api-docs')
def api_docs():
    """Render the static API documentation page."""
    return render_template('api.html')
@app.route('/favicon.ico')
def favicon():
    # No favicon shipped; answer 204 No Content so browsers stop retrying.
    return '', 204
@app.route('/health')
def health():
    """Health check endpoint for HAProxy/monitoring.

    Reports per-component status.  'degraded' still answers HTTP 200 so
    load balancers keep routing while only cache layers are unhealthy.
    """
    components = {}
    overall = 'healthy'

    components['flask'] = {'status': 'ok', 'version': config.APP_VERSION}

    maxmind_exists = handler.mmdb_file.exists()
    components['maxmind_db'] = {
        'status': 'ok' if maxmind_exists else 'missing',
        'exists': maxmind_exists
    }
    if not maxmind_exists:
        overall = 'degraded'

    sqlite_status = get_sqlite_status()
    components['sqlite_cache'] = sqlite_status
    if not sqlite_status['exists'] or sqlite_status.get('status') != 'ok':
        overall = 'degraded'

    if config.REDIS_ENABLED and redis_cache:
        redis_health = redis_cache.health_check()
        if redis_health.get('connected'):
            components['redis'] = {
                'status': 'ok',
                'memory_mb': redis_health.get('memory_used_mb', 0),
                'keys': redis_health.get('keys', 0)
            }
        else:
            components['redis'] = {
                'status': 'error',
                'error': redis_health.get('error', 'Connection failed')
            }
            overall = 'degraded'
    else:
        components['redis'] = {'status': 'disabled', 'enabled': False}

    health_status = {
        'status': overall,
        'timestamp': datetime.now().isoformat(),
        'components': components
    }
    status_code = 200 if overall in ('healthy', 'degraded') else 503
    return jsonify(health_status), status_code
@app.route('/api/stats/summary')
def stats_summary():
    """Combined stats endpoint for the dashboard (MaxMind + SQLite + Redis)."""
    redis_stats = {'enabled': False}
    if config.REDIS_ENABLED and redis_cache:
        redis_health = redis_cache.health_check()
        redis_stats = {
            'enabled': True,
            'connected': redis_health.get('connected', False),
            'memory_mb': redis_health.get('memory_used_mb', 0),
            'keys': redis_health.get('keys', 0)
        }
    return jsonify({
        'maxmind': {
            'exists': handler.mmdb_file.exists(),
            'needs_update': handler.needs_update()
        },
        'sqlite': get_sqlite_status(),
        'redis': redis_stats
    })
def cache_control(max_age: "int | None" = None):
    """Decorator factory for per-view cache-control headers.

    Args:
        max_age: seconds for ``public, max-age=...``.  Any falsy value
            (None or 0) emits explicit no-cache/no-store headers instead —
            note 0 is deliberately treated like None here.

    Fix: the original annotated the parameter ``max_age: int = None``,
    which contradicts its None default; corrected to an optional int.
    """
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            response = f(*args, **kwargs)
            # Only annotate real Response objects; tuples/strings pass through.
            if isinstance(response, Response):
                if max_age:
                    response.headers['Cache-Control'] = f'public, max-age={max_age}'
                else:
                    response.headers['Cache-Control'] = 'no-cache, no-store'
                    response.headers['Pragma'] = 'no-cache'
                    response.headers['Expires'] = '0'
            return response
        return decorated_function
    return decorator
@app.after_request
def add_headers(response):
    """Attach cache-control headers chosen by the request path."""
    path = request.path
    if path == '/' or path.startswith('/api/'):
        # Dynamic content: never cache.
        response.headers['Cache-Control'] = 'no-cache, no-store'
    elif path.startswith('/static/'):
        # Static assets: long-lived, optionally immutable (hash-busted URLs).
        if 'Content-Disposition' in response.headers:
            del response.headers['Content-Disposition']
        if config.ENABLE_CACHE_BUSTING:
            response.headers['Cache-Control'] = f'public, max-age={config.CACHE_TTL_SECONDS}, immutable'
        else:
            response.headers['Cache-Control'] = f'public, max-age={config.CACHE_TTL_SECONDS}'
    elif path == '/api-docs':
        response.headers['Cache-Control'] = 'public, max-age=300'
    return response
if __name__ == '__main__':
    # Dev-server entry point; production runs under gunicorn (see Dockerfile).
    app.run(host=config.FLASK_HOST, port=config.FLASK_PORT, debug=config.FLASK_DEBUG)

0
blocklist.txt Normal file
View File

254
config.py Normal file
View File

@@ -0,0 +1,254 @@
"""
Configuration file for GeoIP Ban Generator
"""
import os
from pathlib import Path
# Base paths (resolved relative to this file so the app is location-independent)
BASE_DIR = Path(__file__).parent
STATIC_DIR = BASE_DIR / 'static'
TEMPLATE_DIR = BASE_DIR / 'templates'
GEOIP_DB_DIR = BASE_DIR / 'geoip_db'
# Flask settings — booleans are parsed from env strings, so only the literal
# (case-insensitive) "true" enables them.
FLASK_HOST = os.getenv('FLASK_HOST', '0.0.0.0')
FLASK_PORT = int(os.getenv('FLASK_PORT', 5000))
FLASK_DEBUG = os.getenv('FLASK_DEBUG', 'False').lower() == 'true'
# NOTE: the default is a placeholder — SECRET_KEY must be overridden in production.
SECRET_KEY = os.getenv('SECRET_KEY', 'change-me-in-production')
# Application settings
APP_NAME = os.getenv('APP_NAME', 'GeoIP Ban Generator')
APP_VERSION = '1.0.0'
# Logo settings (empty LOGO_URL means no logo is rendered)
LOGO_URL = os.getenv('LOGO_URL', '')
LOGO_LINK = os.getenv('LOGO_LINK', '/')
# Footer settings
FOOTER_TEXT = os.getenv('FOOTER_TEXT', '© 2026 GeoIP Ban Generator')
FOOTER_LINK = os.getenv('FOOTER_LINK', '')
FOOTER_LINK_TEXT = os.getenv('FOOTER_LINK_TEXT', 'Documentation')
# MaxMind database settings — primary download URL with a fallback mirror.
MAXMIND_PRIMARY_URL = os.getenv(
'MAXMIND_PRIMARY_URL',
'https://github.com/P3TERX/GeoLite.mmdb/releases/download/2026.02.07/GeoLite2-Country.mmdb'
)
MAXMIND_FALLBACK_URL = os.getenv(
'MAXMIND_FALLBACK_URL',
'https://git.io/GeoLite2-Country.mmdb'
)
MAXMIND_UPDATE_INTERVAL_DAYS = int(os.getenv('MAXMIND_UPDATE_INTERVAL_DAYS', 7))
MAXMIND_AUTO_UPDATE = os.getenv('MAXMIND_AUTO_UPDATE', 'True').lower() == 'true'
# IP range sources — URL templates; '{country_lower}' is substituted with the
# lowercase ISO country code by the consumer.
IP_RANGE_SOURCES = {
'github': 'https://raw.githubusercontent.com/herrbischoff/country-ip-blocks/master/ipv4/{country_lower}.cidr',
'ipdeny': 'https://www.ipdeny.com/ipblocks/data/aggregated/{country_lower}-aggregated.zone'
}
# Cache settings (TTL also reused as the static-asset max-age in app.py)
CACHE_ENABLED = os.getenv('CACHE_ENABLED', 'True').lower() == 'true'
CACHE_TTL_SECONDS = int(os.getenv('CACHE_TTL_SECONDS', 3600))
# Static file cache busting (content-hash query strings on css/js URLs)
ENABLE_CACHE_BUSTING = True
def get_country_flag(country_code: str) -> str:
    """
    Convert an ISO 3166-1 alpha-2 country code to a flag emoji.

    Uses Unicode Regional Indicator Symbols: each ASCII letter A-Z maps to
    an offset from U+1F1E6 (REGIONAL INDICATOR SYMBOL LETTER A).

    Example: 'PL' -> '🇵🇱'

    Returns '' for anything that is not exactly two ASCII letters, so
    malformed codes never yield unrelated (non-flag) code points.
    """
    if not country_code or len(country_code) != 2:
        return ''
    code = country_code.upper()
    # FIX: previously '12' or 'P!' produced arbitrary characters; only
    # ASCII letters map into the regional-indicator range.
    if not (code.isascii() and code.isalpha()):
        return ''
    return chr(0x1F1E6 + ord(code[0]) - ord('A')) + chr(0x1F1E6 + ord(code[1]) - ord('A'))
# Available countries (ISO 3166-1 alpha-2)
# Focus on high-risk scammer countries and commonly blocked regions.
# Each entry is {'code', 'name'}; a 'flag' key is added dynamically below.
COMMON_COUNTRIES = [
    # High-risk Asian countries
    {'code': 'CN', 'name': 'China'},
    {'code': 'IN', 'name': 'India'},
    {'code': 'PK', 'name': 'Pakistan'},
    {'code': 'BD', 'name': 'Bangladesh'},
    {'code': 'ID', 'name': 'Indonesia'},
    {'code': 'PH', 'name': 'Philippines'},
    {'code': 'VN', 'name': 'Vietnam'},
    {'code': 'TH', 'name': 'Thailand'},
    {'code': 'MY', 'name': 'Malaysia'},
    {'code': 'SG', 'name': 'Singapore'},
    {'code': 'KH', 'name': 'Cambodia'},
    {'code': 'MM', 'name': 'Myanmar'},
    {'code': 'LA', 'name': 'Laos'},
    {'code': 'NP', 'name': 'Nepal'},
    {'code': 'LK', 'name': 'Sri Lanka'},
    # Middle East
    {'code': 'IR', 'name': 'Iran'},
    {'code': 'IQ', 'name': 'Iraq'},
    {'code': 'SY', 'name': 'Syria'},
    {'code': 'YE', 'name': 'Yemen'},
    {'code': 'SA', 'name': 'Saudi Arabia'},
    {'code': 'AE', 'name': 'United Arab Emirates'},
    {'code': 'QA', 'name': 'Qatar'},
    {'code': 'KW', 'name': 'Kuwait'},
    {'code': 'BH', 'name': 'Bahrain'},
    {'code': 'OM', 'name': 'Oman'},
    {'code': 'JO', 'name': 'Jordan'},
    {'code': 'LB', 'name': 'Lebanon'},
    # Africa - West
    {'code': 'NG', 'name': 'Nigeria'},
    {'code': 'GH', 'name': 'Ghana'},
    {'code': 'CI', 'name': 'Ivory Coast'},
    {'code': 'SN', 'name': 'Senegal'},
    {'code': 'BJ', 'name': 'Benin'},
    {'code': 'TG', 'name': 'Togo'},
    {'code': 'ML', 'name': 'Mali'},
    {'code': 'BF', 'name': 'Burkina Faso'},
    {'code': 'NE', 'name': 'Niger'},
    {'code': 'LR', 'name': 'Liberia'},
    {'code': 'SL', 'name': 'Sierra Leone'},
    # Africa - East
    {'code': 'KE', 'name': 'Kenya'},
    {'code': 'ET', 'name': 'Ethiopia'},
    {'code': 'TZ', 'name': 'Tanzania'},
    {'code': 'UG', 'name': 'Uganda'},
    {'code': 'SO', 'name': 'Somalia'},
    {'code': 'SD', 'name': 'Sudan'},
    {'code': 'SS', 'name': 'South Sudan'},
    {'code': 'ER', 'name': 'Eritrea'},
    {'code': 'DJ', 'name': 'Djibouti'},
    # Africa - South
    {'code': 'ZA', 'name': 'South Africa'},
    {'code': 'ZW', 'name': 'Zimbabwe'},
    {'code': 'MZ', 'name': 'Mozambique'},
    {'code': 'AO', 'name': 'Angola'},
    {'code': 'ZM', 'name': 'Zambia'},
    {'code': 'MW', 'name': 'Malawi'},
    {'code': 'BW', 'name': 'Botswana'},
    # Africa - Central
    {'code': 'CM', 'name': 'Cameroon'},
    {'code': 'CD', 'name': 'DR Congo'},
    {'code': 'CG', 'name': 'Congo'},
    {'code': 'CF', 'name': 'Central African Republic'},
    {'code': 'TD', 'name': 'Chad'},
    {'code': 'GA', 'name': 'Gabon'},
    # Africa - North
    {'code': 'EG', 'name': 'Egypt'},
    {'code': 'DZ', 'name': 'Algeria'},
    {'code': 'MA', 'name': 'Morocco'},
    {'code': 'TN', 'name': 'Tunisia'},
    {'code': 'LY', 'name': 'Libya'},
    # Eastern Europe
    {'code': 'RU', 'name': 'Russia'},
    {'code': 'UA', 'name': 'Ukraine'},
    {'code': 'BY', 'name': 'Belarus'},
    {'code': 'MD', 'name': 'Moldova'},
    {'code': 'GE', 'name': 'Georgia'},
    {'code': 'AM', 'name': 'Armenia'},
    {'code': 'AZ', 'name': 'Azerbaijan'},
    {'code': 'KZ', 'name': 'Kazakhstan'},
    {'code': 'UZ', 'name': 'Uzbekistan'},
    {'code': 'TM', 'name': 'Turkmenistan'},
    {'code': 'KG', 'name': 'Kyrgyzstan'},
    {'code': 'TJ', 'name': 'Tajikistan'},
    # Balkans
    {'code': 'RO', 'name': 'Romania'},
    {'code': 'BG', 'name': 'Bulgaria'},
    {'code': 'AL', 'name': 'Albania'},
    {'code': 'RS', 'name': 'Serbia'},
    {'code': 'BA', 'name': 'Bosnia and Herzegovina'},
    {'code': 'MK', 'name': 'North Macedonia'},
    # XK is a user-assigned code, not official ISO; widely used for Kosovo.
    {'code': 'XK', 'name': 'Kosovo'},
    {'code': 'ME', 'name': 'Montenegro'},
    # Latin America
    {'code': 'BR', 'name': 'Brazil'},
    {'code': 'MX', 'name': 'Mexico'},
    {'code': 'CO', 'name': 'Colombia'},
    {'code': 'VE', 'name': 'Venezuela'},
    {'code': 'AR', 'name': 'Argentina'},
    {'code': 'PE', 'name': 'Peru'},
    {'code': 'CL', 'name': 'Chile'},
    {'code': 'EC', 'name': 'Ecuador'},
    {'code': 'BO', 'name': 'Bolivia'},
    {'code': 'PY', 'name': 'Paraguay'},
    # Caribbean
    {'code': 'CU', 'name': 'Cuba'},
    {'code': 'HT', 'name': 'Haiti'},
    {'code': 'DO', 'name': 'Dominican Republic'},
    {'code': 'JM', 'name': 'Jamaica'},
    {'code': 'TT', 'name': 'Trinidad and Tobago'},
    # Other high-risk
    {'code': 'KP', 'name': 'North Korea'},
    {'code': 'AF', 'name': 'Afghanistan'},
    {'code': 'TR', 'name': 'Turkey'},
    # Western countries (for reference/testing)
    {'code': 'US', 'name': 'United States'},
    {'code': 'GB', 'name': 'United Kingdom'},
    {'code': 'DE', 'name': 'Germany'},
    {'code': 'FR', 'name': 'France'},
    {'code': 'IT', 'name': 'Italy'},
    {'code': 'ES', 'name': 'Spain'},
    {'code': 'PL', 'name': 'Poland'},
    {'code': 'NL', 'name': 'Netherlands'},
    {'code': 'BE', 'name': 'Belgium'},
    {'code': 'SE', 'name': 'Sweden'},
    {'code': 'NO', 'name': 'Norway'},
    {'code': 'DK', 'name': 'Denmark'},
    {'code': 'FI', 'name': 'Finland'},
    {'code': 'CH', 'name': 'Switzerland'},
    {'code': 'AT', 'name': 'Austria'},
    {'code': 'CA', 'name': 'Canada'},
    {'code': 'AU', 'name': 'Australia'},
    {'code': 'NZ', 'name': 'New Zealand'},
    {'code': 'JP', 'name': 'Japan'},
    {'code': 'KR', 'name': 'South Korea'},
]
# Sort countries by name (the regional grouping above is for maintainers only;
# the UI presents an alphabetical list)
COMMON_COUNTRIES = sorted(COMMON_COUNTRIES, key=lambda x: x['name'])
# Add flags dynamically to countries
for country in COMMON_COUNTRIES:
    country['flag'] = get_country_flag(country['code'])
# Config variants pre-generated for every country by the pre-cache job.
PRECACHE_APP_TYPES = [
    'nginx_geo',
    'nginx_deny',
    'apache_24',
    'haproxy_acl',
    'raw-cidr_txt',
]
# Only the aggregated variant is pre-cached; others are built on demand.
PRECACHE_AGGREGATE_VARIANTS = [True]
# Redis Configuration (defaults match docker-compose service names/ports)
REDIS_HOST = os.getenv('REDIS_HOST', 'localhost')
REDIS_PORT = int(os.getenv('REDIS_PORT', '6379'))
REDIS_DB = int(os.getenv('REDIS_DB', '0'))
# Empty/unset password means "no auth".
REDIS_PASSWORD = os.getenv('REDIS_PASSWORD', None)
REDIS_CACHE_TTL = int(os.getenv('REDIS_CACHE_TTL', '86400'))  # 24h default
REDIS_ENABLED = os.getenv('REDIS_ENABLED', 'true').lower() == 'true'
# Maximum age of cached network data before a rescan is required.
# FIX: this was hard-coded to 168 even though .env.example and docker-compose
# pass a CACHE_MAX_AGE_HOURS environment variable; now the env var is honoured
# with the same 168h (7-day) default.
CACHE_MAX_AGE_HOURS = int(os.getenv('CACHE_MAX_AGE_HOURS', '168'))

127
docker-compose.yml Normal file
View File

@@ -0,0 +1,127 @@
# NOTE(review): the top-level `version` key is obsolete in Compose v2 and is
# ignored with a warning — confirm before removing.
version: '3.8'

services:
  # Shared Redis cache (AOF persistence, LRU eviction, bound to loopback only)
  redis:
    image: redis:7-alpine
    container_name: geoip-redis
    restart: unless-stopped
    command: redis-server --appendonly yes --maxmemory 512mb --maxmemory-policy allkeys-lru
    ports:
      - "127.0.0.1:6379:6379"
    volumes:
      - redis_data:/data
    networks:
      - geoip-network
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 3s
      retries: 3

  # Web application (Flask) — serves the UI and generated configs
  geoip-ban:
    build: .
    container_name: geoip-ban-generator
    restart: unless-stopped
    ports:
      - "${FLASK_PORT:-5000}:${FLASK_PORT:-5000}"
    environment:
      - FLASK_HOST=0.0.0.0
      - FLASK_PORT=${FLASK_PORT:-5000}
      - FLASK_DEBUG=${FLASK_DEBUG:-False}
      # NOTE(review): insecure fallback — require SECRET_KEY in production.
      - SECRET_KEY=${SECRET_KEY:-change-me-in-production}
      - APP_NAME=${APP_NAME:-GeoIP Ban Generator}
      - LOGO_URL=${LOGO_URL:-}
      - LOGO_LINK=${LOGO_LINK:-/}
      - FOOTER_TEXT=${FOOTER_TEXT:-© 2026 GeoIP Ban Generator}
      - FOOTER_LINK=${FOOTER_LINK:-}
      - FOOTER_LINK_TEXT=${FOOTER_LINK_TEXT:-Documentation}
      - MAXMIND_PRIMARY_URL=${MAXMIND_PRIMARY_URL:-https://github.com/P3TERX/GeoLite.mmdb/releases/download/2026.02.07/GeoLite2-Country.mmdb}
      - MAXMIND_FALLBACK_URL=${MAXMIND_FALLBACK_URL:-https://git.io/GeoLite2-Country.mmdb}
      - MAXMIND_UPDATE_INTERVAL_DAYS=${MAXMIND_UPDATE_INTERVAL_DAYS:-7}
      - MAXMIND_AUTO_UPDATE=${MAXMIND_AUTO_UPDATE:-True}
      - CACHE_ENABLED=${CACHE_ENABLED:-True}
      - CACHE_TTL_SECONDS=${CACHE_TTL_SECONDS:-3600}
      # Redis configuration (service name resolves on geoip-network)
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_DB=${REDIS_DB:-0}
      - REDIS_PASSWORD=${REDIS_PASSWORD:-}
      - REDIS_ENABLED=${REDIS_ENABLED:-true}
      - REDIS_CACHE_TTL=${REDIS_CACHE_TTL:-86400}
    volumes:
      - geoip_data:/app/geoip_db
    networks:
      - geoip-network
    depends_on:
      redis:
        condition: service_healthy
    healthcheck:
      # NOTE(review): assumes curl is installed in the app image — confirm,
      # or switch to a python-based probe.
      test: ["CMD", "curl", "-f", "http://localhost:${FLASK_PORT:-5000}/"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

  # Background scan/update job sharing the geoip_db volume with the app
  geoip-scheduler:
    build: .
    container_name: geoip-scheduler
    restart: unless-stopped
    environment:
      - MAXMIND_PRIMARY_URL=${MAXMIND_PRIMARY_URL:-https://github.com/P3TERX/GeoLite.mmdb/releases/download/2026.02.07/GeoLite2-Country.mmdb}
      - MAXMIND_FALLBACK_URL=${MAXMIND_FALLBACK_URL:-https://git.io/GeoLite2-Country.mmdb}
      - MAXMIND_UPDATE_INTERVAL_DAYS=${MAXMIND_UPDATE_INTERVAL_DAYS:-7}
      - SCHEDULER_ENABLED=${SCHEDULER_ENABLED:-true}
      # NOTE(review): defaults here (30d / 720h) differ from .env.example
      # (7d / 168h) — confirm which is intended and align.
      - SCAN_INTERVAL=${SCAN_INTERVAL:-30d}
      - SCAN_TIME=${SCAN_TIME:-02:00}
      - SCAN_ON_STARTUP=${SCAN_ON_STARTUP:-true}
      - CACHE_MAX_AGE_HOURS=${CACHE_MAX_AGE_HOURS:-720}
      - PARALLEL_WORKERS=${PARALLEL_WORKERS:-8}
      # Redis configuration (for cache invalidation after scan)
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_DB=${REDIS_DB:-0}
      - REDIS_PASSWORD=${REDIS_PASSWORD:-}
      - REDIS_ENABLED=${REDIS_ENABLED:-true}
    volumes:
      - geoip_data:/app/geoip_db
    networks:
      - geoip-network
    command: python scheduler.py
    depends_on:
      - geoip-ban
      - redis

  # One-shot pre-cache job; only started with `--profile tools`
  geoip-precache:
    build: .
    container_name: geoip-precache
    restart: "no"
    environment:
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_DB=${REDIS_DB:-0}
      - REDIS_PASSWORD=${REDIS_PASSWORD:-}
      - REDIS_ENABLED=true
    volumes:
      - geoip_data:/app/geoip_db
    networks:
      - geoip-network
    command: python precache_configs.py
    depends_on:
      - redis
      - geoip-ban
    profiles:
      - tools

volumes:
  geoip_data:
    driver: local
  redis_data:
    driver: local

networks:
  geoip-network:
    driver: bridge

648
generate_ban.py Normal file
View File

@@ -0,0 +1,648 @@
#!/usr/bin/env python3
import argparse
import sys
import os
import json
import ipaddress
import urllib.request
from pathlib import Path
from typing import List, Dict, Set
from datetime import datetime, timedelta
import geoip2.database
from geoip2.errors import AddressNotFoundError
class Config:
    """JSON-file-backed configuration manager for the CLI generator.

    Settings persist to a single JSON file (default ``geoip_db/config.json``);
    keys absent from the file fall back to ``DEFAULT_CONFIG``.
    """

    DEFAULT_CONFIG = {
        "database_url": "https://github.com/P3TERX/GeoLite.mmdb/releases/download/2026.02.07/GeoLite2-Country.mmdb",
        "database_file": "GeoLite2-Country.mmdb",
        "last_update": None,
        "update_interval_days": 7,
        "geoip_db_dir": "geoip_db",
        "cache_enabled": True,
        "auto_update": True,
        "ip_range_sources": {
            "github": "https://raw.githubusercontent.com/herrbischoff/country-ip-blocks/master/ipv4/{country_lower}.cidr",
            "alternative": "https://www.ipdeny.com/ipblocks/data/aggregated/{country_lower}-aggregated.zone"
        }
    }

    def __init__(self, config_path: str = "geoip_db/config.json"):
        self.config_path = Path(config_path)
        self.config = self.load()

    @classmethod
    def _default_copy(cls) -> Dict:
        """Copy of DEFAULT_CONFIG with nested dicts duplicated.

        FIX: the previous shallow ``.copy()`` shared ``ip_range_sources``
        between all instances and the class default, so mutating one
        instance's sources silently corrupted every other.
        """
        return {k: (dict(v) if isinstance(v, dict) else v)
                for k, v in cls.DEFAULT_CONFIG.items()}

    def load(self) -> Dict:
        """Load configuration from file, merged over the defaults.

        A missing or unreadable/invalid file degrades to the defaults with a
        warning on stderr instead of crashing (the file will be rewritten on
        the next save()).
        """
        if self.config_path.exists():
            try:
                with open(self.config_path, 'r') as f:
                    user_config = json.load(f)
                return {**self._default_copy(), **user_config}
            except (OSError, ValueError) as e:
                # OSError: unreadable file; ValueError: malformed JSON.
                print(f"Warning: Could not load config: {e}", file=sys.stderr)
        return self._default_copy()

    def save(self):
        """Persist the current configuration as pretty-printed JSON,
        creating parent directories as needed."""
        self.config_path.parent.mkdir(parents=True, exist_ok=True)
        with open(self.config_path, 'w') as f:
            json.dump(self.config, f, indent=2, default=str)

    def get(self, key: str, default=None):
        """Get configuration value (``default`` when the key is absent)."""
        return self.config.get(key, default)

    def set(self, key: str, value):
        """Set configuration value and immediately persist it to disk."""
        self.config[key] = value
        self.save()

    def needs_update(self) -> bool:
        """Check whether the GeoIP database should be re-downloaded.

        False when auto_update is disabled; True when no successful update
        was ever recorded, the stored timestamp is unparseable, or the
        configured interval has elapsed.
        """
        if not self.config.get('auto_update', True):
            return False
        last_update = self.config.get('last_update')
        if not last_update:
            return True
        try:
            last_date = datetime.fromisoformat(last_update)
        except (ValueError, TypeError):
            # Corrupt timestamp: safer to refresh than to run stale.
            return True
        interval = timedelta(days=self.config.get('update_interval_days', 7))
        return datetime.now() - last_date > interval
class GeoIPDatabase:
    """GeoIP database handler using MMDB format.

    Owns the on-disk MaxMind MMDB file (download/update lifecycle) plus an
    in-process cache of per-country network lists fetched from external CIDR
    sources. All diagnostics go to stderr so stdout stays clean for the
    generated config output.
    """
    def __init__(self, config: Config):
        self.config = config
        self.db_dir = Path(config.get('geoip_db_dir', 'geoip_db'))
        self.db_dir.mkdir(parents=True, exist_ok=True)
        self.mmdb_file = self.db_dir / config.get('database_file', 'GeoLite2-Country.mmdb')
        # In-memory only; keyed by the sorted, comma-joined country codes.
        self.cache = {}
        # Lazily opened geoip2 Reader (see open_reader()).
        self.reader = None
    def download_database(self, url: str = None):
        """Download the MMDB database; returns True on success.

        On success the timestamp is recorded in the config so needs_update()
        can rate-limit future downloads.
        NOTE(review): urlretrieve writes directly to the live DB path — a
        partial/failed download can leave a corrupt file behind; consider
        downloading to a temp file and renaming atomically.
        """
        url = url or self.config.get('database_url')
        print(f"Downloading database from: {url}", file=sys.stderr)
        print(f"Saving to: {self.mmdb_file}", file=sys.stderr)
        try:
            urllib.request.urlretrieve(url, self.mmdb_file)
            # Update config
            self.config.set('last_update', datetime.now().isoformat())
            print("Database downloaded successfully", file=sys.stderr)
            print(f"File size: {self.mmdb_file.stat().st_size / 1024 / 1024:.2f} MB", file=sys.stderr)
            return True
        except Exception as e:
            print(f"Error downloading database: {e}", file=sys.stderr)
            return False
    def check_and_update(self):
        """Download the DB when missing or stale; returns True if usable."""
        if not self.mmdb_file.exists():
            print("Database not found, downloading...", file=sys.stderr)
            return self.download_database()
        if self.config.needs_update():
            print("Database is outdated, updating...", file=sys.stderr)
            return self.download_database()
        return True
    def open_reader(self):
        """Open the MMDB reader (idempotent).

        NOTE(review): exits the whole process on failure — acceptable for the
        CLI, but hostile if this class is ever reused as a library.
        """
        if self.reader is None:
            try:
                self.reader = geoip2.database.Reader(str(self.mmdb_file))
                print(f"Opened database: {self.mmdb_file}", file=sys.stderr)
            except Exception as e:
                print(f"Error opening database: {e}", file=sys.stderr)
                print("Install geoip2: pip install geoip2", file=sys.stderr)
                sys.exit(1)
    def close_reader(self):
        """Close the MMDB reader if open (safe to call repeatedly)."""
        if self.reader:
            self.reader.close()
            self.reader = None
    def get_country_networks_from_source(self, country_code: str) -> List[ipaddress.IPv4Network]:
        """Download IPv4 CIDR ranges for one country from external sources.

        Tries each configured source in order and returns the first non-empty
        result; comment lines and malformed entries are skipped. Returns []
        when every source fails.
        """
        sources = self.config.get('ip_range_sources', {})
        networks = []
        country_lower = country_code.lower()
        # Try multiple sources
        for source_name, url_template in sources.items():
            try:
                url = url_template.format(country_lower=country_lower, country_upper=country_code.upper())
                print(f"Fetching from {source_name}: {url}", file=sys.stderr)
                response = urllib.request.urlopen(url, timeout=30)
                data = response.read().decode('utf-8')
                for line in data.strip().split('\n'):
                    line = line.strip()
                    if line and not line.startswith('#'):
                        try:
                            networks.append(ipaddress.IPv4Network(line))
                        except ValueError:
                            continue
                if networks:
                    print(f"Loaded {len(networks)} networks from {source_name}", file=sys.stderr)
                    break
            except Exception as e:
                print(f"Could not fetch from {source_name}: {e}", file=sys.stderr)
                continue
        return networks
    def get_country_networks(self, country_codes: List[str]) -> Dict[str, List[ipaddress.IPv4Network]]:
        """Get IP networks for the given countries, {code: [IPv4Network, ...]}.

        Results are memoized in self.cache (per exact country combination)
        when cache_enabled is set; the cache lives only for this process.
        """
        # Check cache
        cache_key = ','.join(sorted(country_codes))
        if self.config.get('cache_enabled') and cache_key in self.cache:
            print(f"Using cached data for {cache_key}", file=sys.stderr)
            return self.cache[cache_key]
        country_networks = {code: [] for code in country_codes}
        print(f"Loading networks for: {', '.join(country_codes)}", file=sys.stderr)
        # Use external IP range sources (more efficient than scanning MMDB)
        for country_code in country_codes:
            networks = self.get_country_networks_from_source(country_code)
            country_networks[country_code] = networks
            print(f"  {country_code}: {len(networks)} networks", file=sys.stderr)
        # Cache results
        if self.config.get('cache_enabled'):
            self.cache[cache_key] = country_networks
        return country_networks
class ConfigGenerator:
    """Render per-country network lists into web-server/proxy ban configs.

    Every generator is a static method taking ``{country_code: [cidr, ...]}``
    (strings or ``ipaddress.IPv4Network`` objects) and returning the finished
    config file as a string. ``aggregate=True`` collapses adjacent and
    overlapping CIDRs first to shrink the output.
    """

    @staticmethod
    def _aggregate_networks(networks: list) -> list:
        """Collapse a CIDR list into the minimal equivalent set of networks.

        Malformed entries are skipped; on any unexpected failure this falls
        back to plain de-duplication so a config is always produced.
        Returns a sorted list of network strings.
        """
        if not networks:
            return []
        try:
            parsed = []
            for network in networks:
                try:
                    # FIX: str() lets IPv4Network objects (as produced by
                    # GeoIPDatabase) aggregate too — previously they raised in
                    # the constructor and were all silently skipped, so
                    # aggregation was a no-op for the CLI path.
                    parsed.append(ipaddress.IPv4Network(str(network), strict=False))
                except ValueError:
                    continue  # skip malformed entries rather than abort
            if parsed:
                collapsed = ipaddress.collapse_addresses(parsed)
                return sorted(str(net) for net in collapsed)
            return sorted(set(networks))  # at least remove duplicates
        except Exception:
            return sorted(set(networks))

    @staticmethod
    def _flatten(country_networks: dict, aggregate: bool) -> list:
        """Merge all per-country lists into one de-duplicated (and optionally
        aggregated) sorted list — the shared prelude of most generators."""
        merged = []
        for networks in country_networks.values():
            merged.extend(networks)
        if aggregate:
            return ConfigGenerator._aggregate_networks(merged)
        return sorted(set(merged))

    @staticmethod
    def generate_nginx_geo(country_networks: dict, aggregate: bool = True) -> str:
        """nginx ``geo`` block that sets $blocked_country=1 for banned CIDRs."""
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        countries_list = ', '.join(sorted(country_networks.keys()))
        all_networks = ConfigGenerator._flatten(country_networks, aggregate)
        config = f"""# Nginx Geo Module Configuration
# Generated: {timestamp}
# Countries: {countries_list}
# Total networks: {len(all_networks)}
geo $blocked_country {{
    default 0;
"""
        for network in all_networks:
            config += f"    {network} 1;\n"
        config += "}\n"
        return config

    @staticmethod
    def generate_nginx_map(country_networks: dict, aggregate: bool = True) -> str:
        """nginx ``map`` block keyed on $remote_addr, grouped per country.

        NOTE(review): nginx ``map`` matches literal strings, not CIDR ranges;
        ``geo`` is normally the right directive here — confirm intended use.
        """
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        countries_list = ', '.join(sorted(country_networks.keys()))
        # Process each country separately so entries stay grouped/commented.
        processed_networks = {}
        for country_code, networks in country_networks.items():
            if aggregate:
                processed_networks[country_code] = ConfigGenerator._aggregate_networks(networks)
            else:
                processed_networks[country_code] = sorted(set(networks))
        total_networks = sum(len(nets) for nets in processed_networks.values())
        config = f"""# Nginx Map Module Configuration
# Generated: {timestamp}
# Countries: {countries_list}
# Total networks: {total_networks}
map $remote_addr $blocked_country {{
    default 0;
"""
        for country_code in sorted(processed_networks):
            networks = processed_networks[country_code]
            config += f"    # {country_code} - {len(networks)} networks\n"
            for network in networks:
                config += f"    {network} 1;\n"
            config += "\n"
        config += "}\n"
        return config

    @staticmethod
    def generate_nginx_deny(country_networks: dict, aggregate: bool = True) -> str:
        """Flat list of nginx ``deny`` directives, terminated by ``allow all``."""
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        countries_list = ', '.join(sorted(country_networks.keys()))
        all_networks = ConfigGenerator._flatten(country_networks, aggregate)
        config = f"""# Nginx Deny Directives Configuration
# Generated: {timestamp}
# Countries: {countries_list}
# Total networks: {len(all_networks)}
"""
        for network in all_networks:
            config += f"deny {network};\n"
        config += "allow all;\n"
        return config

    @staticmethod
    def generate_apache_24(country_networks: dict, aggregate: bool = True) -> str:
        """Apache 2.4 ``<RequireAll>`` block using ``Require not ip``."""
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        countries_list = ', '.join(sorted(country_networks.keys()))
        all_networks = ConfigGenerator._flatten(country_networks, aggregate)
        config = f"""# Apache 2.4 Configuration
# Generated: {timestamp}
# Countries: {countries_list}
# Total networks: {len(all_networks)}
<RequireAll>
    Require all granted
"""
        for network in all_networks:
            config += f"    Require not ip {network}\n"
        config += "</RequireAll>\n"
        return config

    @staticmethod
    def generate_apache_22(country_networks: dict, aggregate: bool = True) -> str:
        """Legacy Apache 2.2 ``Order Allow,Deny`` + ``Deny from`` directives."""
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        countries_list = ', '.join(sorted(country_networks.keys()))
        all_networks = ConfigGenerator._flatten(country_networks, aggregate)
        config = f"""# Apache 2.2 Configuration
# Generated: {timestamp}
# Countries: {countries_list}
# Total networks: {len(all_networks)}
Order Allow,Deny
Allow from all
"""
        for network in all_networks:
            config += f"Deny from {network}\n"
        return config

    @staticmethod
    def generate_haproxy_acl(country_networks: dict, aggregate: bool = True) -> str:
        """HAProxy frontend snippet with one ``acl blocked_ip src`` per CIDR."""
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        countries_list = ', '.join(sorted(country_networks.keys()))
        all_networks = ConfigGenerator._flatten(country_networks, aggregate)
        config = f"""# HAProxy ACL Configuration
# Generated: {timestamp}
# Countries: {countries_list}
# Total networks: {len(all_networks)}
frontend http-in
    bind *:80
"""
        for network in all_networks:
            config += f"    acl blocked_ip src {network}\n"
        config += """
    http-request deny if blocked_ip
    default_backend servers
"""
        return config

    @staticmethod
    def generate_haproxy_lua(country_networks: dict, aggregate: bool = True) -> str:
        """HAProxy Lua script exposing an ``is_blocked`` fetch.

        NOTE(review): the generated ``string.match`` treats each CIDR as a
        Lua pattern, i.e. it performs text matching rather than a subnet
        membership test — confirm this is the intended behaviour.
        """
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        countries_list = ', '.join(sorted(country_networks.keys()))
        all_networks = ConfigGenerator._flatten(country_networks, aggregate)
        config = f"""-- HAProxy Lua Script
-- Generated: {timestamp}
-- Countries: {countries_list}
-- Total networks: {len(all_networks)}
local blocked_networks = {{
"""
        for network in all_networks:
            config += f'    "{network}",\n'
        # FIX: the closing chunk below was previously a plain (non-f) string
        # that still contained the '}}' escape, so the emitted Lua closed the
        # table with a literal '}}' — a syntax error. It must be a single '}'.
        config += """}
function check_blocked(txn)
    local src_ip = txn.f:src()
    for _, network in ipairs(blocked_networks) do
        if string.match(src_ip, network) then
            return true
        end
    end
    return false
end
core.register_fetches("is_blocked", check_blocked)
"""
        return config
def main():
    """CLI entry point: parse arguments, manage the MMDB database lifecycle,
    and emit a ban configuration for the requested countries/application."""
    parser = argparse.ArgumentParser(
        description='Advanced GeoIP ban configuration generator using MaxMind MMDB',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Generate nginx config for China
  %(prog)s --country CN --app nginx --output china.conf
  # Multiple countries (comma-separated)
  %(prog)s --country CN,RU,KP --app haproxy --output blocked.conf
  # Update database manually
  %(prog)s --update-db
  # Use custom database URL
  %(prog)s --db-url https://example.com/GeoLite2-Country.mmdb --country US --app nginx
  # Disable aggregation for all original networks
  %(prog)s --country CN --app nginx --no-aggregate
  # Set custom configuration options
  %(prog)s --set-config update_interval_days=14
  %(prog)s --set-config auto_update=false
  # Output to console
  %(prog)s --country RU,BY --app nginx
"""
    )
    parser.add_argument(
        '--country',
        help='Country code(s) - comma-separated (e.g., CN,RU,KP)'
    )
    parser.add_argument(
        '--app',
        choices=['nginx', 'haproxy', 'apache'],
        help='Target application type'
    )
    parser.add_argument(
        '--output',
        help='Output file path (default: stdout)'
    )
    parser.add_argument(
        '--config',
        default='geoip_db/config.json',
        help='Config file path (default: geoip_db/config.json)'
    )
    parser.add_argument(
        '--db-url',
        help='Custom database URL (MMDB format)'
    )
    parser.add_argument(
        '--update-db',
        action='store_true',
        help='Force database update'
    )
    parser.add_argument(
        '--no-aggregate',
        action='store_true',
        help='Disable network aggregation'
    )
    parser.add_argument(
        '--no-auto-update',
        action='store_true',
        help='Disable automatic database updates'
    )
    parser.add_argument(
        '--set-config',
        metavar='KEY=VALUE',
        help='Set configuration option (e.g., update_interval_days=14)'
    )
    parser.add_argument(
        '--show-config',
        action='store_true',
        help='Show current configuration'
    )
    parser.add_argument(
        '--list-countries',
        action='store_true',
        help='List available country codes'
    )
    args = parser.parse_args()

    # Load configuration
    config = Config(args.config)

    # Handle list-countries (informational; exits before any network access)
    if args.list_countries:
        common_countries = [
            "CN - China", "RU - Russia", "US - United States", "KP - North Korea",
            "IR - Iran", "BY - Belarus", "SY - Syria", "VE - Venezuela",
            "CU - Cuba", "SD - Sudan", "IQ - Iraq", "LY - Libya",
            "IN - India", "BR - Brazil", "DE - Germany", "FR - France",
            "GB - United Kingdom", "JP - Japan", "KR - South Korea"
        ]
        print("Common country codes:")
        for country in common_countries:
            print(f"  {country}")
        print("\nUse ISO 3166-1 alpha-2 codes (2 letters)")
        return

    # Handle set-config: value is parsed as JSON when possible so numbers
    # and booleans round-trip with their proper types.
    if args.set_config:
        try:
            key, value = args.set_config.split('=', 1)
            try:
                value = json.loads(value)
            except ValueError:
                pass  # keep as plain string
            config.set(key, value)
            print(f"Configuration updated: {key} = {value}", file=sys.stderr)
            return
        except ValueError:
            print("Error: --set-config format should be KEY=VALUE", file=sys.stderr)
            sys.exit(1)

    # Handle show-config
    if args.show_config:
        print(json.dumps(config.config, indent=2, default=str))
        return

    # Override config with command line args
    if args.db_url:
        config.set('database_url', args.db_url)
    if args.no_auto_update:
        config.set('auto_update', False)

    # Initialize database
    db = GeoIPDatabase(config)

    # Handle database update
    if args.update_db:
        db.download_database()
        print("Database updated successfully", file=sys.stderr)
        return

    # Check if we need to generate config
    if not args.country or not args.app:
        if not args.update_db and not args.set_config and not args.show_config and not args.list_countries:
            parser.print_help()
            sys.exit(1)
        return

    # Auto-update database if needed
    if not args.no_auto_update:
        db.check_and_update()

    # Parse countries (normalized to upper-case ISO codes)
    countries = [c.strip().upper() for c in args.country.split(',')]
    print(f"Processing countries: {', '.join(countries)}", file=sys.stderr)

    # Get networks
    country_networks = db.get_country_networks(countries)

    # Check if we got any data
    if not any(country_networks.values()):
        print("Error: No networks found for specified countries", file=sys.stderr)
        sys.exit(1)

    # Generate configuration.
    # FIX: this dict previously referenced ConfigGenerator.generate_nginx,
    # .generate_haproxy and .generate_apache — none of which exist — so every
    # generation run crashed with AttributeError. Map each app to the real
    # generator (geo block / ACL list / Apache 2.4 syntax respectively).
    generators = {
        'nginx': ConfigGenerator.generate_nginx_geo,
        'haproxy': ConfigGenerator.generate_haproxy_acl,
        'apache': ConfigGenerator.generate_apache_24
    }
    aggregate = not args.no_aggregate
    config_output = generators[args.app](country_networks, aggregate)

    # Output
    if args.output:
        output_path = Path(args.output)
        output_path.parent.mkdir(parents=True, exist_ok=True)
        with open(output_path, 'w') as f:
            f.write(config_output)
        print(f"Configuration written to: {output_path}", file=sys.stderr)
    else:
        print(config_output)

    # Close database
    db.close_reader()


if __name__ == '__main__':
    main()

1367
geoip_handler.py Normal file

File diff suppressed because it is too large Load Diff

55
haproxy/haproxy.cfg Normal file
View File

@@ -0,0 +1,55 @@
# HAProxy front for the GeoIP Ban Generator web app (4 local Flask workers).
global
    log /dev/log local0
    log /dev/log local1 notice
    chroot /var/lib/haproxy
    stats socket /var/lib/haproxy/admin.sock mode 660 level admin
    stats timeout 30s
    user haproxy
    group haproxy
    daemon
    ca-base /etc/ssl/certs
    crt-base /etc/ssl/private
    ssl-default-bind-ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256
    ssl-default-bind-options ssl-min-ver TLSv1.2 no-tls-tickets

defaults
    log global
    mode http
    option httplog
    option dontlognull
    # Values are in milliseconds: 5s connect, 15min client/server
    # (long timeouts presumably for large config downloads — confirm).
    timeout connect 5000
    timeout client 900000
    timeout server 900000

listen stats
    bind *:8404
    stats enable
    stats uri /stats
    stats refresh 10s
    stats admin if TRUE
    # NOTE(review): hard-coded credentials committed to VCS and the stats
    # admin page is bound to all interfaces — change before deploying.
    stats auth admin:geoip2024

frontend http_front
    bind *:80
    option httplog
    log-format "%ci:%cp [%tr] %ft %b/%s %TR/%Tw/%Tc/%Tr/%Ta %ST %B %CC %CS %tsc %ac/%fc/%bc/%sc/%rc %sq/%bq %hr %hs %{+Q}r"
    default_backend webapp_backend

backend webapp_backend
    # Source-IP affinity so per-session state (if any) sticks to one worker.
    balance source
    option httpchk GET /health
    http-response del-header Server
    http-check expect status 200
    retries 3
    option redispatch
    option http-server-close
    option forwardfor
    http-request add-header X-Forwarded-Proto http
    compression algo gzip
    compression type text/html text/plain text/css application/javascript application/json
    server webapp1 127.0.0.1:5001 check inter 5s fall 3 rise 2 maxconn 50
    server webapp2 127.0.0.1:5002 check inter 5s fall 3 rise 2 maxconn 50
    server webapp3 127.0.0.1:5003 check inter 5s fall 3 rise 2 maxconn 50
    server webapp4 127.0.0.1:5004 check inter 5s fall 3 rise 2 maxconn 50

246
precache_daemon.py Normal file
View File

@@ -0,0 +1,246 @@
#!/usr/bin/env python3
"""
Pre-cache individual countries in ALL config variants to Redis
"""
import sys
import os
import sqlite3
import json
from datetime import datetime
# Run relative to the script's own directory so the SQLite DB and the
# project-local imports below resolve regardless of launch directory.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, SCRIPT_DIR)
os.chdir(SCRIPT_DIR)
print(f"[PRE-CACHE] Working from: {SCRIPT_DIR}", flush=True)
# Project-local imports must come after the sys.path tweak above.
from redis_cache import RedisCache
from geoip_handler import ConfigGenerator
import config
# SQLite cache produced by the scanner; a hard requirement for this job.
DB_PATH = config.GEOIP_DB_DIR / 'networks_cache.db'
if not DB_PATH.exists():
    print(f"[ERROR] SQLite database not found: {DB_PATH}", flush=True)
    sys.exit(1)
redis_cache = RedisCache()
health = redis_cache.health_check()
print(f"[PRE-CACHE] Redis: {health['status']} ({health.get('memory_used_mb', 0):.1f} MB used)", flush=True)
# Every config variant generated per country; 'raw-*' are plain data exports.
# NOTE(review): this list is wider than config.PRECACHE_APP_TYPES — confirm
# which one is authoritative and align them.
APP_TYPES = [
    'nginx_geo',
    'nginx_map',
    'nginx_deny',
    'apache_24',
    'haproxy_acl',
    'raw-cidr_txt',
    'raw-newline_txt',
    'raw-json',
    'raw-csv',
]
def get_available_countries():
    """Return ``{country_code: network_count}`` for every country recorded in
    the SQLite ``cache_metadata`` table, in country-code order.

    FIX: the connection is now closed via try/finally — previously it leaked
    whenever the query raised.
    """
    conn = sqlite3.connect(str(DB_PATH), timeout=30.0)
    try:
        cursor = conn.cursor()
        cursor.execute('SELECT country_code, network_count FROM cache_metadata ORDER BY country_code')
        return {code: count for code, count in cursor.fetchall()}
    finally:
        conn.close()
def fetch_country_networks(country_code):
    """Load every cached CIDR string for one country from SQLite.

    Reads in 100k-row chunks to bound memory on large countries; returns []
    when the country is absent from ``cache_metadata``.

    FIX: the connection is now closed via try/finally — previously it leaked
    on the early-return path only by luck, and always on exceptions.
    """
    conn = sqlite3.connect(str(DB_PATH), timeout=600.0)
    try:
        cursor = conn.cursor()
        cursor.execute('SELECT network_count FROM cache_metadata WHERE country_code = ?',
                       (country_code.upper(),))
        row = cursor.fetchone()
        if not row:
            return []
        total_count = row[0]
        chunk_size = 100000
        all_networks = []
        offset = 0
        # NOTE(review): LIMIT/OFFSET without ORDER BY has no guaranteed stable
        # ordering in SQLite — rows could repeat or be skipped if the table is
        # written concurrently. Confirm the scanner never writes during this.
        while offset < total_count:
            cursor.execute('SELECT network FROM networks_cache WHERE country_code = ? LIMIT ? OFFSET ?',
                           (country_code.upper(), chunk_size, offset))
            chunk = [r[0] for r in cursor.fetchall()]
            if not chunk:
                break
            all_networks.extend(chunk)
            offset += chunk_size
        return all_networks
    finally:
        conn.close()
# --- Main pre-cache pass: one country at a time, all config variants -------
start_time = datetime.now()
print(f"\n{'='*70}", flush=True)
print(f"[STRATEGY] Per-country cache (all config variants)", flush=True)
print(f" Each country: raw data + {len(APP_TYPES)} types × 2 aggregation = {len(APP_TYPES)*2} configs", flush=True)
print(f" Multi-country combos: generated on-demand", flush=True)
print(f"{'='*70}\n", flush=True)
available_countries = get_available_countries()
print(f"Found {len(available_countries)} countries\n", flush=True)
# Run counters for the final summary.
country_data_generated = 0
country_data_cached = 0
config_generated = 0
config_cached = 0
errors = 0
for idx, (country, count) in enumerate(available_countries.items(), 1):
    print(f"[{idx}/{len(available_countries)}] {country}: {count:,} networks", flush=True)
    # Step 1: ensure the raw per-country network list is in Redis
    # (key prefix "geoban:country:"; configs use "geoip:config:").
    redis_key_data = f"geoban:country:{country}"
    data_exists = redis_cache.redis_client.exists(redis_key_data)
    if data_exists:
        country_data_cached += 1
        print(f" ✓ Raw data: cached", flush=True)
        try:
            data = redis_cache.redis_client.get(redis_key_data)
            # Handle both bytes and str replies (depends on client config).
            if isinstance(data, bytes):
                networks = json.loads(data.decode('utf-8'))
            else:
                networks = json.loads(data)
            country_networks = {country: networks}
        except Exception as e:
            print(f" ✗ Error loading: {e}", flush=True)
            errors += 1
            continue
    else:
        networks = fetch_country_networks(country)
        if not networks:
            print(f" ✗ No data", flush=True)
            errors += 1
            continue
        # 86400s = 24h TTL on the raw data key.
        redis_cache.redis_client.setex(redis_key_data, 86400, json.dumps(networks))
        country_data_generated += 1
        print(f" ✓ Raw data: generated", flush=True)
        country_networks = {country: networks}
    # Step 2: generate every (app_type, aggregate) config variant.
    configs_generated_this_country = 0
    configs_cached_this_country = 0
    for app_type in APP_TYPES:
        for aggregate in [True, False]:
            try:
                cached_config = redis_cache.get_cached_config([country], app_type, aggregate)
                if cached_config:
                    config_cached += 1
                    configs_cached_this_country += 1
                    continue
                # 'raw-*' variants are direct data exports, no generator class.
                if app_type.startswith('raw-'):
                    format_type = app_type.split('-')[1]
                    if format_type == 'cidr_txt':
                        config_text = '\n'.join(networks)
                    elif format_type == 'newline_txt':
                        config_text = '\n'.join(networks)
                    elif format_type == 'json':
                        config_text = json.dumps({
                            'country': country,
                            'networks': networks,
                            'count': len(networks)
                        }, indent=2)
                    elif format_type == 'csv':
                        config_text = 'network\n' + '\n'.join(networks)
                    else:
                        print(f" ✗ Unknown raw format: {format_type}", flush=True)
                        continue
                else:
                    # Dispatch table into geoip_handler.ConfigGenerator;
                    # includes types not in APP_TYPES for forward-compat.
                    generators = {
                        'nginx_geo': ConfigGenerator.generate_nginx_geo,
                        'nginx_map': ConfigGenerator.generate_nginx_map,
                        'nginx_deny': ConfigGenerator.generate_nginx_deny,
                        'apache_22': ConfigGenerator.generate_apache_22,
                        'apache_24': ConfigGenerator.generate_apache_24,
                        'haproxy_acl': ConfigGenerator.generate_haproxy_acl,
                        'haproxy_lua': ConfigGenerator.generate_haproxy_lua,
                    }
                    generator = generators.get(app_type)
                    if not generator:
                        continue
                    config_text = generator(country_networks, aggregate=aggregate, redis_ips=None)
                stats = {
                    'countries': 1,
                    'total_networks': len(networks),
                    'per_country': {country: len(networks)}
                }
                success = redis_cache.save_config([country], app_type, aggregate, config_text, stats)
                if success:
                    config_generated += 1
                    configs_generated_this_country += 1
                else:
                    errors += 1
            except Exception as e:
                print(f"{app_type} ({aggregate}): {e}", flush=True)
                errors += 1
    if configs_generated_this_country > 0:
        print(f" → New configs: {configs_generated_this_country}", flush=True)
    if configs_cached_this_country > 0:
        print(f" → Cached configs: {configs_cached_this_country}", flush=True)
    progress_pct = (idx / len(available_countries)) * 100
    print(f" → Progress: {progress_pct:.1f}%\n", flush=True)
# --- Summary ---------------------------------------------------------------
duration = (datetime.now() - start_time).total_seconds()
print(f"{'='*70}", flush=True)
print(f"[SUMMARY] Complete in {duration/60:.1f} minutes", flush=True)
print(f"\n[Raw Country Data]", flush=True)
print(f" Generated: {country_data_generated}", flush=True)
print(f" Cached: {country_data_cached}", flush=True)
print(f"\n[Config Files]", flush=True)
print(f" Generated: {config_generated}", flush=True)
print(f" Cached: {config_cached}", flush=True)
print(f" Errors: {errors}", flush=True)
# Best-effort Redis key census via SCAN (non-blocking cursor iteration).
try:
    total_keys = redis_cache.redis_client.dbsize()
    cursor = 0
    country_keys = 0
    while True:
        cursor, keys = redis_cache.redis_client.scan(cursor, match="geoban:country:*", count=1000)
        country_keys += len(keys)
        if cursor == 0:
            break
    cursor = 0
    config_keys = 0
    while True:
        cursor, keys = redis_cache.redis_client.scan(cursor, match="geoip:config:*", count=1000)
        config_keys += len(keys)
        if cursor == 0:
            break
    health = redis_cache.health_check()
    print(f"\n[REDIS]", flush=True)
    print(f" Total keys: {total_keys}", flush=True)
    print(f" Country keys: {country_keys}", flush=True)
    print(f" Config keys: {config_keys}", flush=True)
    print(f" Memory: {health.get('memory_used_mb', 0):.2f} MB", flush=True)
except Exception as e:
    print(f"\n[REDIS] Error: {e}", flush=True)
print(f"{'='*70}\n", flush=True)

204
redis_cache.py Normal file
View File

@@ -0,0 +1,204 @@
"""
Redis Cache Handler for pre-generated GeoIP configs
"""
import redis
import json
import hashlib
from datetime import datetime
from typing import Optional, Dict, List
import config
class RedisCache:
    """Redis-backed cache for pre-generated GeoIP block configs.

    Cache keys are derived deterministically from (countries, app_type,
    aggregate), so equivalent requests always map to the same
    ``geoip:config:<hash>`` key regardless of ordering or case.
    """

    def __init__(self):
        # decode_responses=True makes get() return str (JSON text), not bytes.
        self.redis_client = redis.Redis(
            host=config.REDIS_HOST,
            port=config.REDIS_PORT,
            db=config.REDIS_DB,
            password=config.REDIS_PASSWORD if config.REDIS_PASSWORD else None,
            decode_responses=True,
            socket_connect_timeout=5,
            socket_timeout=5,
        )
        self.default_ttl = config.REDIS_CACHE_TTL

    def _generate_key(self, countries: List[str], app_type: str, aggregate: bool) -> str:
        """Generate a normalized, deterministic cache key.

        Country codes are upper-cased, stripped and sorted; app_type is
        lower-cased and stripped; aggregate is coerced to bool. The key is
        the first 16 hex chars of the MD5 of the stable JSON serialization.
        """
        key_data = {
            'countries': sorted(c.upper().strip() for c in countries),
            'app_type': app_type.lower().strip(),
            'aggregate': bool(aggregate),
        }
        # sort_keys keeps the serialization stable across runs/processes.
        key_str = json.dumps(key_data, sort_keys=True)
        key_hash = hashlib.md5(key_str.encode()).hexdigest()[:16]
        return f"geoip:config:{key_hash}"

    def get_cached_config(self, countries: List[str], app_type: str, aggregate: bool) -> Optional[Dict]:
        """Return a pre-generated config dict from Redis, or None on miss/error."""
        try:
            cache_key = self._generate_key(countries, app_type, aggregate)
            data = self.redis_client.get(cache_key)
            if data:
                cached = json.loads(data)
                print(f"[REDIS] Cache HIT: {cache_key}", flush=True)
                return cached
            print(f"[REDIS] Cache MISS: {cache_key}", flush=True)
            return None
        except redis.RedisError as e:
            # Treat Redis outages as a cache miss so callers can regenerate.
            print(f"[REDIS] Error getting cache: {e}", flush=True)
            return None

    def save_config(self, countries: List[str], app_type: str, aggregate: bool,
                    config_text: str, stats: Dict, ttl: Optional[int] = None) -> bool:
        """Save a generated config (plus metadata) to Redis with a TTL.

        Returns True on success, False when Redis is unavailable.
        """
        try:
            cache_key = self._generate_key(countries, app_type, aggregate)
            cache_data = {
                'config': config_text,
                'stats': stats,
                'generated_at': datetime.now().isoformat(),
                # Stored so invalidate_country() can match entries by country.
                'countries': sorted(countries),
                'app_type': app_type,
                'aggregate': aggregate
            }
            ttl = ttl or self.default_ttl
            self.redis_client.setex(
                cache_key,
                ttl,
                json.dumps(cache_data, ensure_ascii=False)
            )
            print(f"[REDIS] Saved config: {cache_key} (TTL: {ttl}s)", flush=True)
            return True
        except redis.RedisError as e:
            print(f"[REDIS] Error saving cache: {e}", flush=True)
            return False

    def invalidate_country(self, country_code: str) -> int:
        """Delete every cached config whose country list contains country_code.

        Returns the number of deleted entries (0 on Redis error).
        """
        try:
            deleted = 0
            for key in self.redis_client.scan_iter(match="geoip:config:*", count=100):
                data = self.redis_client.get(key)
                if not data:
                    continue
                try:
                    cached = json.loads(data)
                except (json.JSONDecodeError, TypeError):
                    # Skip malformed entries; a bare except here would also
                    # swallow KeyboardInterrupt/SystemExit.
                    continue
                if country_code in cached.get('countries', []):
                    self.redis_client.delete(key)
                    deleted += 1
            print(f"[REDIS] Invalidated {deleted} cache entries for {country_code}", flush=True)
            return deleted
        except redis.RedisError as e:
            print(f"[REDIS] Error invalidating cache: {e}", flush=True)
            return 0

    def get_cache_stats(self) -> Dict:
        """Summarize cached config entries.

        NOTE: total_size_bytes is computed over at most the first 100 keys
        (to bound the cost of GET-ing every value), while total_entries
        counts all keys.
        """
        try:
            keys = list(self.redis_client.scan_iter(match="geoip:config:*", count=1000))
            total_size = 0
            entries = []
            for key in keys[:100]:
                try:
                    data = self.redis_client.get(key)
                    ttl = self.redis_client.ttl(key)
                    if not data:
                        continue
                    cached = json.loads(data)
                    size = len(data)
                    total_size += size
                    entries.append({
                        'countries': cached.get('countries', []),
                        'app_type': cached.get('app_type'),
                        'aggregate': cached.get('aggregate'),
                        'generated_at': cached.get('generated_at'),
                        'size_bytes': size,
                        'ttl_seconds': ttl
                    })
                except (redis.RedisError, json.JSONDecodeError, TypeError):
                    continue
            return {
                'total_entries': len(keys),
                'total_size_bytes': total_size,
                'total_size_mb': round(total_size / 1024 / 1024, 2),
                'entries_sample': entries[:20]
            }
        except redis.RedisError as e:
            print(f"[REDIS] Error getting stats: {e}", flush=True)
            return {'error': str(e)}

    def flush_all(self):
        """Delete all geoban/geoip keys. Returns True on success."""
        try:
            patterns = [
                'geoban:country:*',
                'geoban:config:*',
                'geoip:config:*'
            ]
            deleted = 0
            for pattern in patterns:
                # Manual SCAN loop: cursor 0 means the scan cycle is complete.
                cursor = 0
                while True:
                    cursor, keys = self.redis_client.scan(cursor, match=pattern, count=1000)
                    if keys:
                        deleted += self.redis_client.delete(*keys)
                    if cursor == 0:
                        break
            print(f"[REDIS] Flushed {deleted} keys", flush=True)
            return True
        except Exception as e:
            print(f"[REDIS] Flush error: {e}", flush=True)
            return False

    def health_check(self) -> Dict:
        """Ping Redis and report memory usage; never raises."""
        try:
            self.redis_client.ping()
            info = self.redis_client.info('memory')
            return {
                'status': 'healthy',
                'connected': True,
                'memory_used_mb': round(info.get('used_memory', 0) / 1024 / 1024, 2),
                'memory_peak_mb': round(info.get('used_memory_peak', 0) / 1024 / 1024, 2)
            }
        except redis.RedisError as e:
            return {
                'status': 'unhealthy',
                'connected': False,
                'error': str(e)
            }

7
requirements.txt Normal file
View File

@@ -0,0 +1,7 @@
Flask
Werkzeug
gunicorn
geoip2
schedule
requests
redis

94
rescan_github_merge.py Normal file
View File

@@ -0,0 +1,94 @@
#!/usr/bin/env python3
"""
Rescan script - merges GitHub networks with existing MaxMind cache
Updates source to 'maxmind+github' without rescanning MaxMind
"""
import sys
sys.path.insert(0, '/opt/geoip_block_generator')
import datetime  # hoisted: was imported inside the per-country loop
import sqlite3
from geoip_handler import GeoIPHandler
from pathlib import Path

handler = GeoIPHandler()
conn = sqlite3.connect(str(handler.cache_db), timeout=30)
cursor = conn.cursor()

cursor.execute("SELECT country_code, network_count, source FROM cache_metadata ORDER BY country_code")
countries = cursor.fetchall()
print(f"Found {len(countries)} countries in cache\n")

for country_code, current_count, current_source in countries:
    print(f"[{country_code}] Current: {current_count:,} networks, source: {current_source}")
    cursor.execute(
        "SELECT network FROM networks_cache WHERE country_code = ?",
        (country_code,)
    )
    maxmind_networks = [row[0] for row in cursor.fetchall()]
    if not maxmind_networks:
        print(f"  ⚠ Empty cache, skipping...")
        continue

    github_networks = handler._fetch_from_github(country_code)
    if not github_networks:
        print(f"  GitHub: no data")
        # Even without GitHub data, normalize a missing source tag.
        if current_source in ('unknown', None):
            cursor.execute(
                "UPDATE cache_metadata SET source = ? WHERE country_code = ?",
                ('maxmind', country_code)
            )
            conn.commit()
            print(f"  ✓ Updated source: unknown → maxmind")
        continue

    # Only GitHub networks that MaxMind did not already cover are inserted.
    missing = set(github_networks) - set(maxmind_networks)
    if missing:
        print(f"  + GitHub: {len(github_networks):,} networks, {len(missing):,} NEW")
        timestamp = datetime.datetime.now().isoformat()
        # INSERT OR IGNORE guards against races with other writers.
        cursor.executemany(
            "INSERT OR IGNORE INTO networks_cache (country_code, network, source, created_at) VALUES (?, ?, ?, ?)",
            [(country_code, net, 'github', timestamp) for net in missing]
        )
        new_count = current_count + len(missing)
        cursor.execute(
            "UPDATE cache_metadata SET network_count = ?, source = ?, last_scan = ? WHERE country_code = ?",
            (new_count, 'maxmind+github', timestamp, country_code)
        )
        conn.commit()
        print(f"  ✓ Updated: {current_count:,} → {new_count:,} networks, source: maxmind+github")
    else:
        print(f"  GitHub: {len(github_networks):,} networks, 0 new (all covered by MaxMind)")
        cursor.execute(
            "UPDATE cache_metadata SET source = ? WHERE country_code = ?",
            ('maxmind+github', country_code)
        )
        conn.commit()
        print(f"  ✓ Updated source: {current_source} → maxmind+github")

conn.close()

# Re-open for the summary so the report reflects committed state.
print("\n=== Summary ===")
conn = sqlite3.connect(str(handler.cache_db), timeout=30)
cursor = conn.cursor()
cursor.execute("SELECT source, COUNT(*), SUM(network_count) FROM cache_metadata GROUP BY source")
for source, count, total in cursor.fetchall():
    print(f"{source}: {count} countries, {total:,} networks")
conn.close()

395
scheduler.py Normal file
View File

@@ -0,0 +1,395 @@
#!/usr/bin/env python3
"""
GeoIP Country Scanner Daemon - Incremental Update Mode
"""
import schedule
import time
import sys
import signal
import os
import sqlite3
import concurrent.futures
from datetime import datetime
from pathlib import Path
from concurrent.futures import ThreadPoolExecutor, as_completed
from multiprocessing import cpu_count
import threading
sys.path.insert(0, str(Path(__file__).parent))
from geoip_handler import GeoIPHandler
import config
running = True  # main-loop flag; cleared by signal_handler on SIGTERM/SIGINT
log_lock = threading.Lock()  # serializes log output across worker threads
write_lock = threading.Lock()  # serializes DB writes (one writer at a time)
active_scans = {}  # country_code -> {start_time, progress, last_update, is_update}
active_scans_lock = threading.Lock()  # guards all access to active_scans
def signal_handler(signum, frame):
    """Signal hook: flag the scheduler main loop to stop gracefully."""
    global running
    running = False  # polled by the main loop, which then exits
    print(f"\n[{datetime.now()}] Received signal {signum}, shutting down...", flush=True)
    sys.stdout.flush()
def log_safe(message):
    """Print a message while holding the global log lock (thread-safe)."""
    log_lock.acquire()
    try:
        print(message, flush=True)
        sys.stdout.flush()
    finally:
        log_lock.release()
def update_scan_progress(country_code, progress_msg):
    """Record the latest progress message for a country that is still scanning."""
    with active_scans_lock:
        entry = active_scans.get(country_code)
        if entry is not None:
            entry['progress'] = progress_msg
            entry['last_update'] = datetime.now()
def progress_callback_factory(country_code):
    """Return a one-argument callback that forwards progress for country_code."""
    def _on_progress(message):
        update_scan_progress(country_code, message)
    return _on_progress
def print_active_scans():
    """Dump a status table of all in-flight scans; no-op when nothing runs."""
    with active_scans_lock:
        if not active_scans:
            return
        separator = "=" * 70
        print("\n" + separator, flush=True)
        print("ACTIVE SCANS STATUS:", flush=True)
        print(separator, flush=True)
        for country, info in sorted(active_scans.items()):
            elapsed = (datetime.now() - info['start_time']).total_seconds()
            mode = "UPDATE" if info.get('is_update', False) else "SCAN"
            progress = info.get('progress', 'Unknown')
            print(f"  {country} [{mode}]: {progress} | {elapsed:.0f}s", flush=True)
        print(separator + "\n", flush=True)
        sys.stdout.flush()
def scan_single_country(country_code, is_update=False):
    """Scan one country (MaxMind + GitHub), merge results, and persist them.

    Registers itself in the shared `active_scans` dict for status reporting,
    serializes the DB write with `write_lock`, and always removes its
    `active_scans` entry before returning.

    Returns a result dict: {country, success, networks, error, mode}.
    """
    try:
        # Register this scan so print_active_scans() can report on it.
        with active_scans_lock:
            active_scans[country_code] = {
                'start_time': datetime.now(),
                'progress': 'Starting...',
                'last_update': datetime.now(),
                'is_update': is_update
            }
        start_time = time.time()
        mode = "INCREMENTAL UPDATE" if is_update else "FULL SCAN"
        print(f"[START] {country_code} - {mode}...", flush=True)
        sys.stdout.flush()
        progress_cb = progress_callback_factory(country_code)
        # Each worker gets its own handler instance (thread isolation).
        handler = GeoIPHandler()
        print(f"[{country_code}] Scanning MaxMind + GitHub...", flush=True)
        maxmind_networks = handler._scan_maxmind_for_country(country_code, progress_callback=progress_cb)
        if maxmind_networks:
            print(f"[{country_code}] MaxMind: {len(maxmind_networks):,} networks, checking GitHub...", flush=True)
            # Merge in GitHub networks that MaxMind did not already list.
            github_networks = handler._fetch_from_github(country_code)
            if github_networks:
                maxmind_set = set(maxmind_networks)
                github_set = set(github_networks)
                missing = github_set - maxmind_set
                if missing:
                    maxmind_networks.extend(missing)
                    print(f"[{country_code}] GitHub added {len(missing):,} new networks", flush=True)
                else:
                    print(f"[{country_code}] GitHub: {len(github_networks):,} networks (no new)", flush=True)
                source = 'maxmind+github'
            else:
                print(f"[{country_code}] GitHub: no data", flush=True)
                source = 'maxmind'
            networks = maxmind_networks
        else:
            # MaxMind empty: fall back to GitHub as the sole source.
            print(f"[{country_code}] MaxMind found nothing, trying GitHub...", flush=True)
            networks = handler._fetch_from_github(country_code)
            source = 'github' if networks else None
        if networks:
            # Serialize the SQLite write; scans themselves run in parallel.
            with write_lock:
                print(f"[{country_code}] Acquired write lock, saving to database...", flush=True)
                if is_update:
                    saved = handler._update_cache_incremental(country_code, networks, source)
                else:
                    saved = handler._save_to_cache(country_code, networks, source)
                print(f"[{country_code}] Released write lock", flush=True)
            elapsed = time.time() - start_time
            # Deregister before reporting so the status table stays accurate.
            with active_scans_lock:
                active_scans.pop(country_code, None)
            if saved:
                print(f"[DONE] {country_code}: {len(networks)} networks in {elapsed:.1f}s ({mode})", flush=True)
                sys.stdout.flush()
                return {'country': country_code, 'success': True, 'networks': len(networks), 'error': None, 'mode': mode}
            else:
                print(f"[ERROR] {country_code}: Failed to save to cache", flush=True)
                sys.stdout.flush()
                return {'country': country_code, 'success': False, 'networks': 0, 'error': 'Failed to save', 'mode': mode}
        else:
            with active_scans_lock:
                active_scans.pop(country_code, None)
            print(f"[ERROR] {country_code}: No data found", flush=True)
            sys.stdout.flush()
            return {'country': country_code, 'success': False, 'networks': 0, 'error': 'No data found', 'mode': mode}
    except Exception as e:
        # Always clean up the active_scans entry, even on unexpected failure.
        with active_scans_lock:
            active_scans.pop(country_code, None)
        print(f"[ERROR] {country_code}: {e}", flush=True)
        sys.stdout.flush()
        import traceback
        traceback.print_exc()
        return {'country': country_code, 'success': False, 'networks': 0, 'error': str(e), 'mode': 'UNKNOWN'}
def scan_all_countries_incremental(parallel_workers=None, max_age_hours=168):
    """Scan all missing/stale countries in parallel and report results.

    Missing countries get a full scan; stale ones get an incremental update.
    Returns True when the run completed (even with per-country failures),
    False only on an unexpected top-level error.
    """
    log_safe(f"[{datetime.now()}] Starting INCREMENTAL country scan...")
    try:
        handler = GeoIPHandler()
        # Refresh the MaxMind DB first; a failed download is non-fatal.
        if handler.needs_update():
            log_safe("Updating MaxMind database...")
            result = handler.download_database()
            if not result.get('success'):
                log_safe(f"Warning: Database update failed - {result.get('error')}")
        log_safe("\nChecking cache status...")
        missing, stale = handler.get_countries_needing_scan(max_age_hours)
        log_safe(f"Missing countries (never scanned): {len(missing)}")
        log_safe(f"Stale countries (needs update): {len(stale)}")
        if missing:
            log_safe(f"Missing: {', '.join(sorted(missing))}")
        if stale:
            log_safe(f"Stale: {', '.join(sorted(stale))}")
        total = len(missing) + len(stale)
        if total == 0:
            log_safe("\n✓ All countries are up to date!")
            return True
        if parallel_workers is None:
            parallel_workers = min(cpu_count(), 16)
        log_safe(f"\nProcessing {total} countries using {parallel_workers} parallel workers...")
        log_safe(f" - {len(missing)} new countries (full scan)")
        log_safe(f" - {len(stale)} stale countries (incremental update)")
        log_safe(f"Note: Database writes are serialized with write lock")
        log_safe(f"Estimated time: {total / parallel_workers * 3:.1f} minutes\n")
        start_time = datetime.now()
        completed = 0
        success_count = 0
        failed_countries = []
        results_list = []
        last_progress_time = time.time()
        last_status_print = time.time()

        def print_progress(force=False):
            # Rate-limited progress line (at most every 30s unless forced).
            nonlocal last_progress_time
            current_time = time.time()
            if not force and (current_time - last_progress_time) < 30:
                return
            last_progress_time = current_time
            elapsed = (datetime.now() - start_time).total_seconds()
            avg_time = elapsed / completed if completed > 0 else 0
            remaining = (total - completed) * avg_time if completed > 0 else 0
            # NOTE(review): the bar characters look like they were lost in a
            # paste (empty strings repeated) — confirm against the original
            # file; likely intended to be block/shade glyphs.
            progress_bar = "" * int(completed / total * 40)
            progress_bar += "" * (40 - int(completed / total * 40))
            msg = (f"[{progress_bar}] {completed}/{total} ({100*completed/total:.1f}%) | "
                   f"Elapsed: {elapsed:.0f}s | ETA: {remaining:.0f}s")
            print(msg, flush=True)
            sys.stdout.flush()

        log_safe("Starting parallel execution...")
        sys.stdout.flush()
        # Missing countries → full scan (False); stale → incremental (True).
        tasks = [(country, False) for country in missing] + [(country, True) for country in stale]
        with ThreadPoolExecutor(max_workers=parallel_workers) as executor:
            future_to_country = {
                executor.submit(scan_single_country, country, is_update): country
                for country, is_update in tasks
            }
            log_safe(f"Submitted {len(future_to_country)} tasks\n")
            sys.stdout.flush()
            pending = set(future_to_country.keys())
            while pending:
                # Wake at least every 10s (or on first completion) so the
                # status table below can be refreshed while scans run.
                done, pending = concurrent.futures.wait(
                    pending,
                    timeout=10,
                    return_when=concurrent.futures.FIRST_COMPLETED
                )
                for future in done:
                    result = future.result()
                    results_list.append(result)
                    completed += 1
                    if result['success']:
                        success_count += 1
                    else:
                        failed_countries.append(result['country'])
                print_progress(force=bool(done))
                current_time = time.time()
                if current_time - last_status_print >= 30:
                    print_active_scans()
                    last_status_print = current_time
        print("\n", flush=True)
        sys.stdout.flush()
        elapsed = (datetime.now() - start_time).total_seconds()
        log_safe("=" * 70)
        log_safe("SCAN RESULTS (sorted by country):")
        log_safe("=" * 70)
        for result in sorted(results_list, key=lambda x: x['country']):
            mode_str = f"[{result.get('mode', 'UNKNOWN')}]"
            if result['success']:
                log_safe(f"  {result['country']}: ✓ {result['networks']:,} networks {mode_str}")
            else:
                log_safe(f"  {result['country']}: ✗ {result['error']} {mode_str}")
        log_safe("=" * 70)
        log_safe(f"\n[{datetime.now()}] Incremental scan complete!")
        log_safe(f"✓ Success: {success_count}/{total} countries")
        log_safe(f"  - New countries: {len([r for r in results_list if r.get('mode') == 'FULL SCAN' and r['success']])}")
        log_safe(f"  - Updated countries: {len([r for r in results_list if r.get('mode') == 'INCREMENTAL UPDATE' and r['success']])}")
        log_safe(f"  Time: {elapsed:.1f}s ({elapsed/60:.1f} minutes)")
        log_safe(f"  Average: {elapsed/total:.1f}s per country\n")
        if failed_countries:
            log_safe(f"✗ Failed: {', '.join(failed_countries)}\n")
        return True
    except Exception as e:
        log_safe(f"[{datetime.now()}] ERROR: {e}")
        import traceback
        traceback.print_exc()
        sys.stdout.flush()
        return False
if __name__ == '__main__':
    # Daemon entry point: banner, env-driven config, optional startup scan,
    # then a schedule loop that polls once per minute until signaled.
    print("=" * 70, flush=True)
    print("GeoIP Country Scanner Daemon", flush=True)
    print("=" * 70, flush=True)
    print(f"Started: {datetime.now()}", flush=True)
    print(f"Data dir: {config.GEOIP_DB_DIR}", flush=True)
    print(f"CPU cores: {cpu_count()}", flush=True)
    sys.stdout.flush()
    # Kill switch: lets the same image run without the scheduler.
    scheduler_enabled = os.getenv('SCHEDULER_ENABLED', 'true').lower() == 'true'
    if not scheduler_enabled:
        print("\n[DISABLED] SCHEDULER_ENABLED=false - exiting", flush=True)
        print("=" * 70, flush=True)
        sys.stdout.flush()
        sys.exit(0)
    print("=" * 70, flush=True)
    sys.stdout.flush()
    # Graceful shutdown on docker stop (SIGTERM) and Ctrl+C (SIGINT).
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)
    scan_time = os.getenv('SCAN_TIME', '02:00')
    scan_interval = os.getenv('SCAN_INTERVAL', '7d')
    scan_on_startup = os.getenv('SCAN_ON_STARTUP', 'true').lower() == 'true'
    cache_max_age_hours = int(os.getenv('CACHE_MAX_AGE_HOURS', '168'))
    # PARALLEL_WORKERS=0 means "auto" (bounded by CPU count, capped at 16).
    parallel_workers = int(os.getenv('PARALLEL_WORKERS', '16'))
    if parallel_workers == 0:
        parallel_workers = min(cpu_count(), 16)
    print(f"\n[CONFIG] Scheduler: enabled", flush=True)
    print(f"[CONFIG] Parallel: {parallel_workers} workers", flush=True)
    print(f"[CONFIG] Interval: {scan_interval}", flush=True)
    print(f"[CONFIG] Time: {scan_time}", flush=True)
    print(f"[CONFIG] Startup scan: {scan_on_startup}", flush=True)
    print(f"[CONFIG] Cache max age: {cache_max_age_hours}h ({cache_max_age_hours/24:.1f} days)", flush=True)
    sys.stdout.flush()
    # Bind config into a zero-arg callable for the schedule library.
    scan_function = lambda: scan_all_countries_incremental(parallel_workers, cache_max_age_hours)
    if scan_on_startup:
        print("\n[STARTUP] Running incremental scan...\n", flush=True)
        sys.stdout.flush()
        scan_function()
    else:
        print("\n[STARTUP] Skipping (SCAN_ON_STARTUP=false)", flush=True)
        sys.stdout.flush()
    # SCAN_INTERVAL accepts: daily | weekly | monthly | '<N>h' | '<N>d'.
    if scan_interval == 'daily':
        schedule.every().day.at(scan_time).do(scan_function)
        print(f"\n[SCHEDULER] Daily at {scan_time}", flush=True)
    elif scan_interval == 'weekly':
        schedule.every().monday.at(scan_time).do(scan_function)
        print(f"\n[SCHEDULER] Weekly (Monday at {scan_time})", flush=True)
    elif scan_interval == 'monthly':
        # Approximated as every 30 days; SCAN_TIME is not applied here.
        schedule.every(30).days.do(scan_function)
        print(f"\n[SCHEDULER] Monthly (every 30 days)", flush=True)
    elif scan_interval.endswith('h'):
        hours = int(scan_interval[:-1])
        schedule.every(hours).hours.do(scan_function)
        print(f"\n[SCHEDULER] Every {hours} hours", flush=True)
    elif scan_interval.endswith('d'):
        days = int(scan_interval[:-1])
        schedule.every(days).days.do(scan_function)
        print(f"\n[SCHEDULER] Every {days} days", flush=True)
    else:
        print(f"\n[ERROR] Invalid SCAN_INTERVAL: {scan_interval}", flush=True)
        sys.stdout.flush()
        sys.exit(1)
    next_run = schedule.next_run()
    if next_run:
        print(f"[SCHEDULER] Next run: {next_run}", flush=True)
    print("\nScheduler running. Press Ctrl+C to stop.\n", flush=True)
    sys.stdout.flush()
    # Main loop: `running` is cleared by signal_handler, ending the loop.
    while running:
        schedule.run_pending()
        time.sleep(60)
    print("\n[SHUTDOWN] Stopped gracefully.", flush=True)
    sys.stdout.flush()
    sys.exit(0)

60
start-instance.sh Executable file
View File

@@ -0,0 +1,60 @@
#!/bin/bash
# Launch one gunicorn instance of the GeoIP webapp on 127.0.0.1:<port>.
# Usage: start-instance.sh <port>
set -e

PORT=$1
if [ -z "$PORT" ]; then
    echo "ERROR: Port not specified"
    echo "Usage: $0 <port>"
    exit 1
fi
# Reject non-numeric ports early instead of letting gunicorn fail later.
case "$PORT" in
    *[!0-9]*)
        echo "ERROR: Port must be numeric, got: $PORT"
        exit 1
        ;;
esac

cd /opt/geoip_block_generator

# Safe .env parser: skips comments/blank lines, trims whitespace, strips
# surrounding quotes, and drops unquoted inline comments ("KEY=8  # auto").
if [ -f /opt/geoip_block_generator/.env ]; then
    echo "Loading environment from .env..."
    while IFS='=' read -r key value || [ -n "$key" ]; do
        [[ "$key" =~ ^[[:space:]]*# ]] && continue
        [[ -z "$key" ]] && continue
        key=$(echo "$key" | xargs)
        value=$(echo "$value" | xargs)
        if [[ "$value" =~ ^\"(.*)\"$ ]]; then
            value="${BASH_REMATCH[1]}"
        elif [[ "$value" =~ ^\'(.*)\'$ ]]; then
            value="${BASH_REMATCH[1]}"
        else
            # Unquoted value: cut trailing " # comment" so e.g.
            # PARALLEL_WORKERS=8  # 0=auto  exports just "8".
            value="${value%%" #"*}"
        fi
        export "$key=$value"
    done < /opt/geoip_block_generator/.env
fi

# Override port for this instance
export FLASK_PORT=$PORT
export FLASK_HOST=127.0.0.1

# Create log directory if not exists
mkdir -p /var/log/geoip-ban

# Log startup
echo "========================================"
echo "GeoIP WebApp Instance Starting"
echo "Port: $PORT"
echo "User: $(whoami)"
echo "Time: $(date)"
echo "========================================"

# exec replaces the shell so gunicorn becomes PID of this service and
# receives signals directly.
exec /opt/geoip_block_generator/venv/bin/gunicorn \
    --bind "127.0.0.1:${PORT}" \
    --workers 1 \
    --threads 8 \
    --worker-class sync \
    --timeout 900 \
    --access-logfile - \
    --error-logfile - \
    --log-level info \
    --access-logformat '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s" [Instance:'"${PORT}"']' \
    app:app

37
start.sh Executable file
View File

@@ -0,0 +1,37 @@
#!/bin/bash
# Start the GeoIP webapp under gunicorn using settings from .env
# (FLASK_HOST/FLASK_PORT), with sane defaults.
set -e

# Safe .env parser: skips comments/blank lines, trims whitespace, strips
# surrounding quotes, and drops unquoted inline comments ("KEY=8  # auto").
if [ -f /opt/geoip_block_generator/.env ]; then
    echo "Loading environment from .env..."
    while IFS='=' read -r key value || [ -n "$key" ]; do
        [[ "$key" =~ ^[[:space:]]*# ]] && continue
        [[ -z "$key" ]] && continue
        key=$(echo "$key" | xargs)
        value=$(echo "$value" | xargs)
        if [[ "$value" =~ ^\"(.*)\"$ ]]; then
            value="${BASH_REMATCH[1]}"
        elif [[ "$value" =~ ^\'(.*)\'$ ]]; then
            value="${BASH_REMATCH[1]}"
        else
            # Unquoted value: cut trailing " # comment" so e.g.
            # PARALLEL_WORKERS=8  # 0=auto  exports just "8".
            value="${value%%" #"*}"
        fi
        export "$key=$value"
    done < /opt/geoip_block_generator/.env
fi

# Defaults
FLASK_HOST=${FLASK_HOST:-127.0.0.1}
FLASK_PORT=${FLASK_PORT:-5000}

# Ensure the log directory exists; gunicorn fails to start otherwise.
mkdir -p /var/log/geoip-ban

# Start gunicorn (exec so it receives signals directly)
exec /opt/geoip_block_generator/venv/bin/gunicorn \
    --bind "${FLASK_HOST}:${FLASK_PORT}" \
    --workers 1 \
    --threads 8 \
    --worker-class sync \
    --timeout 900 \
    --access-logfile /var/log/geoip-ban/access.log \
    --error-logfile /var/log/geoip-ban/error.log \
    app:app

496
static/css/style.css Normal file
View File

@@ -0,0 +1,496 @@
body {
background-color: #f5f5f5;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
padding-bottom: 2rem;
}
html {
scroll-behavior: smooth;
}
.card {
border: 1px solid #e0e0e0;
border-radius: 0.5rem;
margin-bottom: 1.5rem;
animation: fadeIn 0.4s ease-out;
}
.card-header {
background-color: #ffffff;
border-bottom: 2px solid #e0e0e0;
padding: 1.25rem 1.5rem;
}
.card-header h4 {
color: #212529;
font-weight: 600;
}
.card-body {
background-color: #ffffff;
}
.shadow-sm {
box-shadow: 0 0.125rem 0.25rem rgba(0, 0, 0, 0.075) !important;
}
#countryList {
max-height: 600px;
overflow-y: auto;
padding: 0.75rem;
background-color: #fafafa;
border: 1px solid #e0e0e0;
border-radius: 0.375rem;
margin-bottom: 0.75rem;
}
#countryList::-webkit-scrollbar {
width: 8px;
}
#countryList::-webkit-scrollbar-track {
background: #f1f1f1;
border-radius: 4px;
}
#countryList::-webkit-scrollbar-thumb {
background: #888;
border-radius: 4px;
}
#countryList::-webkit-scrollbar-thumb:hover {
background: #555;
}
.form-check-compact {
padding: 0.25rem 0.5rem;
margin-bottom: 0.15rem;
transition: background-color 0.1s;
border-radius: 0.25rem;
display: flex;
align-items: center;
gap: 0.4rem;
}
.form-check-compact:hover {
background-color: #e9ecef;
}
.form-check-compact .form-check-input {
width: 1rem;
height: 1rem;
margin: 0;
cursor: pointer;
flex-shrink: 0;
position: relative;
}
.form-check-compact .form-check-label {
cursor: pointer;
user-select: none;
font-size: 0.75rem;
padding: 0;
margin: 0;
font-family: 'Courier New', monospace;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
line-height: 1.4;
flex: 1;
min-width: 0;
}
#countryList .col-lg-2,
#countryList .col-md-3,
#countryList .col-sm-4,
#countryList .col-6 {
padding: 0.15rem;
}
#countryList .form-check {
padding-left: 0;
min-height: auto;
}
.form-label {
color: #495057;
font-size: 1rem;
margin-bottom: 0.75rem;
}
.form-select,
.form-control {
border-radius: 0.375rem;
border: 1px solid #ced4da;
transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out;
}
.form-select:focus,
.form-control:focus {
border-color: #80bdff;
box-shadow: 0 0 0 0.2rem rgba(0, 123, 255, 0.25);
}
.form-select-lg {
padding: 0.75rem 1rem;
font-size: 1.05rem;
}
.form-switch {
padding-left: 0;
min-height: auto;
}
.form-switch .form-check-input {
width: 3rem;
height: 1.5rem;
margin-left: 0;
margin-right: 1rem;
float: left;
cursor: pointer;
}
.form-switch .form-check-label {
display: inline-block;
padding-left: 0;
padding-top: 0.125rem;
}
.aggregate-card {
background-color: #f8f9fa;
border: 1px solid #e0e0e0;
border-radius: 0.375rem;
padding: 1rem;
}
.aggregate-card .form-check {
padding: 0;
margin-bottom: 0;
}
.btn {
border-radius: 0.375rem;
font-weight: 500;
transition: all 0.2s ease;
}
.btn-primary {
background-color: #0d6efd;
border-color: #0d6efd;
}
.btn-primary:hover {
background-color: #0b5ed7;
border-color: #0a58ca;
transform: translateY(-1px);
box-shadow: 0 4px 8px rgba(13, 110, 253, 0.3);
}
.btn-lg {
padding: 0.875rem 1.5rem;
font-size: 1.125rem;
}
.btn-outline-primary {
color: #0d6efd;
border-color: #0d6efd;
}
.btn-outline-primary:hover {
background-color: #0d6efd;
border-color: #0d6efd;
color: white;
}
.btn-outline-secondary {
color: #6c757d;
border-color: #6c757d;
}
.btn-outline-secondary:hover {
background-color: #6c757d;
border-color: #6c757d;
color: white;
}
.btn:disabled {
cursor: not-allowed;
opacity: 0.65;
}
.alert {
border-radius: 0.5rem;
border: none;
padding: 1rem 1.25rem;
}
.alert i {
font-size: 1.1rem;
vertical-align: middle;
}
.alert-info {
background-color: #d1ecf1;
color: #0c5460;
}
.alert-success {
background-color: #d4edda;
color: #155724;
}
.alert-warning {
background-color: #fff3cd;
color: #856404;
}
.alert-danger {
background-color: #f8d7da;
color: #721c24;
}
.progress {
border-radius: 0.5rem;
background-color: #e9ecef;
overflow: hidden;
}
.progress-bar {
font-size: 0.95rem;
font-weight: 500;
}
.navbar {
background-color: #ffffff;
border-bottom: 1px solid #e0e0e0;
padding: 1rem 0;
}
.navbar-brand {
font-weight: 600;
font-size: 1.25rem;
color: #212529;
}
.navbar-brand img {
max-height: 30px;
}
.footer {
background-color: #ffffff;
border-top: 1px solid #e0e0e0;
padding: 1.5rem 0;
margin-top: 3rem;
}
.footer a {
color: #0d6efd;
text-decoration: none;
transition: color 0.2s;
}
.footer a:hover {
color: #0a58ca;
text-decoration: underline;
}
.modal-xl {
max-width: 90%;
}
#previewContent {
background-color: #282c34 !important;
color: #abb2bf !important;
padding: 1.5rem;
border-radius: 0.375rem;
font-family: 'Courier New', Consolas, Monaco, monospace;
font-size: 0.875rem;
line-height: 1.5;
max-height: 70vh;
overflow: auto;
white-space: pre;
word-wrap: normal;
display: block !important;
}
.modal-body {
padding: 1.5rem;
}
.modal-body pre {
margin-bottom: 0;
background-color: transparent;
}
.modal-body pre code {
display: block;
background-color: #282c34;
color: #abb2bf;
}
#previewContent::-webkit-scrollbar {
width: 10px;
height: 10px;
}
#previewContent::-webkit-scrollbar-track {
background: #21252b;
}
#previewContent::-webkit-scrollbar-thumb {
background: #4b5263;
border-radius: 5px;
}
#previewContent::-webkit-scrollbar-thumb:hover {
background: #5c6370;
}
.api-header-get {
background-color: #e7f3ff;
border-left: 4px solid #0dcaf0;
cursor: pointer;
transition: background-color 0.2s;
}
.api-header-get:hover {
background-color: #d1ecf1;
}
.api-header-post {
background-color: #d4edda;
border-left: 4px solid #198754;
cursor: pointer;
transition: background-color 0.2s;
}
.api-header-post:hover {
background-color: #c3e6cb;
}
.api-path {
font-size: 1rem;
font-weight: 600;
color: #212529;
}
.api-endpoint pre {
background-color: #282c34;
color: #abb2bf;
padding: 1rem;
border-radius: 0.375rem;
overflow-x: auto;
margin-bottom: 0;
}
.api-endpoint pre code {
background-color: transparent;
color: inherit;
padding: 0;
font-size: 0.9rem;
line-height: 1.6;
}
.api-endpoint .text-success {
color: #98c379 !important;
}
.api-endpoint .text-warning {
color: #e5c07b !important;
}
.api-endpoint .text-info {
color: #61afef !important;
}
.api-endpoint .text-danger {
color: #e06c75 !important;
}
@keyframes fadeIn {
from {
opacity: 0;
transform: translateY(20px);
}
to {
opacity: 1;
transform: translateY(0);
}
}
@media (min-width: 1400px) {
#countryList .col-lg-2 {
flex: 0 0 12.5%;
max-width: 12.5%;
}
}
@media (max-width: 1199px) {
#countryList .col-lg-2 {
flex: 0 0 16.666%;
max-width: 16.666%;
}
}
@media (max-width: 991px) {
#countryList .col-md-3 {
flex: 0 0 20%;
max-width: 20%;
}
}
@media (max-width: 767px) {
.card-body {
padding: 1.5rem !important;
}
#countryList {
max-height: 300px;
}
#countryList .col-sm-4 {
flex: 0 0 25%;
max-width: 25%;
}
.form-check-compact .form-check-label {
font-size: 0.75rem;
}
.form-select-lg,
.btn-lg {
font-size: 1rem;
padding: 0.75rem 1.25rem;
}
.navbar-brand {
font-size: 1rem;
}
.modal-xl {
max-width: 95%;
}
}
@media (max-width: 575px) {
#countryList .col-6 {
flex: 0 0 33.333%;
max-width: 33.333%;
}
}
@media (max-width: 399px) {
#countryList .col-6 {
flex: 0 0 50%;
max-width: 50%;
}
}
/* Use a distinct keyframe name: the file already defines a global
   `fadeIn` (20px slide) for .card, and a second `@keyframes fadeIn`
   here would silently override it — the last definition with a given
   name wins for the whole document. */
#variantDescription {
    animation: fadeInSlight 0.3s ease-in;
}
@keyframes fadeInSlight {
    from { opacity: 0; transform: translateY(-5px); }
    to { opacity: 1; transform: translateY(0); }
}

75
static/js/api.js Normal file
View File

@@ -0,0 +1,75 @@
// Base URL of the current deployment; used to build absolute API URLs.
const baseUrl = window.location.origin;

// On load, inject the base URL into the docs page placeholders
// (#baseUrl plus every element whose id starts with "curlUrl").
document.addEventListener('DOMContentLoaded', function() {
    document.getElementById('baseUrl').textContent = baseUrl;
    document.querySelectorAll('[id^="curlUrl"]').forEach(element => {
        element.textContent = baseUrl;
    });
});
// Toggle the collapsible documentation panel for one API endpoint.
// Relies on Bootstrap's Collapse plugin (global `bootstrap`).
function toggleEndpoint(id) {
    const element = document.getElementById(id);
    // `toggle: true` shows/hides immediately; Bootstrap manages the
    // instance itself, so keeping an unused reference is unnecessary.
    new bootstrap.Collapse(element, { toggle: true });
}
// Call an API endpoint and render the JSON response (or error) into the
// matching "response-..." panel on the docs page.
function tryEndpoint(endpoint, method = 'GET') {
    const targetId = 'response-' + endpoint.replace(/\//g, '-');
    const panel = document.getElementById(targetId);
    const output = document.getElementById(targetId + '-body');

    panel.style.display = 'block';
    output.textContent = 'Loading...';

    fetch(baseUrl + '/api/' + endpoint, {
        method: method,
        headers: { 'Content-Type': 'application/json' }
    })
        .then(response => response.json())
        .then(data => {
            output.textContent = JSON.stringify(data, null, 2);
        })
        .catch(error => {
            output.textContent = 'Error: ' + error.message;
        });
}
// Validate the country-code input and POST a cache invalidation request,
// rendering the JSON response into the docs page panel.
function tryInvalidateCountry() {
    const countryInput = document.getElementById('invalidateCountry');
    const country = countryInput.value.trim().toUpperCase();
    // Require exactly two ASCII letters; the previous length-only check
    // accepted inputs like "12" or "A1".
    if (!/^[A-Z]{2}$/.test(country)) {
        alert('Please enter a valid 2-letter country code (e.g., CN, RU, US)');
        return;
    }
    const url = baseUrl + '/api/cache/invalidate/' + country;
    const responseDiv = document.getElementById('response-cache-invalidate');
    const responseBody = document.getElementById('response-cache-invalidate-body');
    responseDiv.style.display = 'block';
    responseBody.textContent = 'Loading...';
    fetch(url, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json'
        }
    })
    .then(response => response.json())
    .then(data => {
        responseBody.textContent = JSON.stringify(data, null, 2);
        // Clear the input only after a confirmed successful invalidation.
        if (data.success) {
            countryInput.value = '';
        }
    })
    .catch(error => {
        responseBody.textContent = 'Error: ' + error.message;
    });
}

375
static/js/app.js Normal file
View File

@@ -0,0 +1,375 @@
// Base URL of the current deployment; used for all API calls below.
const BASE_URL = window.location.origin;

// Per-app-type output variants shown in the "variant" <select>.
// Each entry: value (sent to the API), text (option label), description
// (shown under the select); optional `recommended`/`warning` flags pick
// the styling of the description alert.
const APP_VARIANTS = {
    'raw-cidr': [
        {
            value: 'txt',
            text: 'Plain Text (.txt)',
            description: 'Simple list of CIDR blocks, one per line'
        },
        {
            value: 'csv',
            text: 'CSV Format (.csv)',
            description: 'Structured CSV with country codes and networks'
        }
    ],
    nginx: [
        {
            value: 'deny',
            text: 'Deny Directives',
            description: 'Simple and fast. Works everywhere. Recommended for large lists.',
            recommended: true
        },
        {
            value: 'geo',
            text: 'Geo Module',
            description: 'Fast with native CIDR support. Requires http_geo_module compiled in nginx.'
        },
        {
            value: 'map',
            text: 'Map Module (regex)',
            description: 'Slow with 10k+ rules. Uses regex patterns. Not recommended for production.',
            warning: true
        }
    ],
    apache: [
        {
            value: '24',
            text: 'Apache 2.4 (Require)',
            description: 'Modern Apache 2.4+ syntax using Require directives'
        },
        {
            value: '22',
            text: 'Apache 2.2 (Allow/Deny)',
            description: 'Legacy Apache 2.2 syntax with Allow/Deny directives'
        }
    ],
    haproxy: [
        {
            value: 'acl',
            text: 'ACL Rules',
            description: 'Native HAProxy ACL rules for frontend/backend blocking'
        },
        {
            value: 'lua',
            text: 'Lua Script',
            description: 'Lua-based blocking script for advanced HAProxy setups'
        }
    ]
};
// Initialize the form once the DOM is ready: populate the variant
// <select> for the default app type and show the MaxMind DB status.
document.addEventListener('DOMContentLoaded', function() {
    updateVariants();
    checkDatabaseStatus();
});
// Rebuild the #appVariant <select> to match the currently chosen app type,
// then refresh the descriptive alert shown underneath it.
function updateVariants() {
    const chosenType = document.getElementById('appType').value;
    const select = document.getElementById('appVariant');
    const section = document.getElementById('variantSection');
    const entries = APP_VARIANTS[chosenType] || [];
    select.innerHTML = '';
    section.style.display = 'block';
    for (const entry of entries) {
        const opt = document.createElement('option');
        opt.value = entry.value;
        opt.textContent = entry.text;
        // Stash metadata on the option so the description alert can be
        // rebuilt whenever the selection changes.
        opt.dataset.description = entry.description || '';
        opt.dataset.warning = entry.warning || false;
        opt.dataset.recommended = entry.recommended || false;
        select.appendChild(opt);
    }
    updateVariantDescription();
}
// Show an info/success/warning alert describing the selected variant, or
// hide the description area when the selected option has no description.
function updateVariantDescription() {
    const select = document.getElementById('appVariant');
    const target = document.getElementById('variantDescription');
    if (!target) return;
    const option = select.options[select.selectedIndex];
    if (!option || !option.dataset.description) {
        target.style.display = 'none';
        return;
    }
    // Pick styling: "recommended" wins over "warning"; default is info.
    let alertClass = 'alert-info';
    let borderClass = 'border-info';
    let icon = 'fa-info-circle';
    if (option.dataset.recommended === 'true') {
        alertClass = 'alert-success';
        borderClass = 'border-success';
        icon = 'fa-check-circle';
    } else if (option.dataset.warning === 'true') {
        alertClass = 'alert-warning';
        borderClass = 'border-warning';
        icon = 'fa-exclamation-triangle';
    }
    target.innerHTML = `
        <div class="alert ${alertClass} border-start border-4 ${borderClass} mb-0 py-2">
            <small>
                <i class="fas ${icon} me-2"></i>
                ${option.dataset.description}
            </small>
        </div>
    `;
    target.style.display = 'block';
}
// Query the backend for MaxMind database status and reflect the result in
// the #dbStatus alert (ready / needs update / downloading).
function checkDatabaseStatus() {
    fetch(BASE_URL + '/api/database/status')
        .then((res) => res.json())
        .then((info) => {
            if (!info.success) return;
            const box = document.getElementById('dbStatus');
            if (info.needs_update) {
                box.className = 'alert alert-warning mb-0';
                box.innerHTML = '<i class="fas fa-exclamation-triangle me-2"></i>Database needs update <button class="btn btn-sm btn-warning ms-2" onclick="updateDatabase()">Update Now</button>';
            } else if (info.exists) {
                box.className = 'alert alert-success mb-0';
                box.innerHTML = '<i class="fas fa-check-circle me-2"></i>Database ready (Last update: ' + formatDate(info.last_update) + ')';
            } else {
                // Neither present nor flagged for update: a download is
                // presumably in progress server-side.
                box.className = 'alert alert-info mb-0';
                box.innerHTML = '<i class="fas fa-download me-2"></i>Downloading database...';
            }
        })
        .catch((err) => {
            console.error('Error:', err);
        });
}
// Trigger a manual MaxMind database update and reflect the progress and
// outcome in the #dbStatus alert.
function updateDatabase() {
    const statusDiv = document.getElementById('dbStatus');
    statusDiv.className = 'alert alert-info mb-0';
    statusDiv.innerHTML = '<i class="fas fa-spinner fa-spin me-2"></i>Updating database...';
    fetch(BASE_URL + '/api/database/update', { method: 'POST' })
        .then(response => response.json())
        .then(data => {
            if (data.success) {
                statusDiv.className = 'alert alert-success mb-0';
                statusDiv.innerHTML = '<i class="fas fa-check-circle me-2"></i>Database updated successfully';
                // Re-check shortly after so the "last update" time refreshes.
                setTimeout(checkDatabaseStatus, 2000);
            } else {
                statusDiv.className = 'alert alert-danger mb-0';
                statusDiv.innerHTML = '<i class="fas fa-times-circle me-2"></i>Update failed: ' + data.error;
            }
        })
        .catch(error => {
            // BUGFIX: the chain previously had no rejection handler, so a
            // network failure left the "Updating database..." spinner forever.
            statusDiv.className = 'alert alert-danger mb-0';
            statusDiv.innerHTML = '<i class="fas fa-times-circle me-2"></i>Update failed: ' + error.message;
        });
}
// Tick every country checkbox on the form.
function selectAll() {
    for (const box of document.querySelectorAll('input[name="countries"]')) {
        box.checked = true;
    }
}
// Untick every country checkbox on the form.
function deselectAll() {
    for (const box of document.querySelectorAll('input[name="countries"]')) {
        box.checked = false;
    }
}
// Render an ISO-ish timestamp string as a locale date + time.
// Returns 'Never' for null/undefined/empty input and echoes the raw input
// back when it cannot be parsed as a date.
function formatDate(dateString) {
    if (!dateString) return 'Never';
    const date = new Date(dateString);
    // BUGFIX: the Date constructor never throws, so the original try/catch
    // was dead code and unparseable input rendered as
    // "Invalid Date Invalid Date". Detect the NaN timestamp instead and
    // fall back to the raw string.
    if (Number.isNaN(date.getTime())) return dateString;
    return date.toLocaleDateString() + ' ' + date.toLocaleTimeString();
}
// Copy the preview content to the clipboard, preferring the async Clipboard
// API and falling back to a hidden textarea + execCommand('copy') for older
// browsers or non-secure contexts where navigator.clipboard rejects.
function copyToClipboard() {
    const text = document.getElementById('previewContent').textContent;
    navigator.clipboard.writeText(text).then(() => {
        showResult('Copied to clipboard!', 'success');
    }).catch(() => {
        // Legacy path: select the text in an invisible textarea and issue
        // the deprecated (but widely supported) copy command.
        const helper = document.createElement('textarea');
        helper.value = text;
        helper.style.position = 'fixed';
        helper.style.opacity = '0';
        document.body.appendChild(helper);
        helper.select();
        try {
            document.execCommand('copy');
            showResult('Copied to clipboard!', 'success');
        } catch (e) {
            showResult('Failed to copy to clipboard', 'danger');
        }
        document.body.removeChild(helper);
    });
}
// Collect the generation form state into an API request payload, or return
// null when no country checkbox is ticked.
function getFormData() {
    const picked = [];
    document.querySelectorAll('input[name="countries"]:checked')
        .forEach((box) => picked.push(box.value));
    if (picked.length === 0) {
        return null;
    }
    // The cache checkbox may be absent from some pages; default to true.
    const cacheBox = document.getElementById('useCache');
    return {
        countries: picked,
        app_type: document.getElementById('appType').value,
        app_variant: document.getElementById('appVariant').value,
        aggregate: document.getElementById('aggregate').checked,
        use_cache: cacheBox ? cacheBox.checked : true
    };
}
// When a config was served from cache, flash a dismissible success banner
// at the top of the main column and auto-remove it after five seconds.
function showCacheBadge(fromCache, generatedAt) {
    if (!fromCache) {
        return;
    }
    const banner = document.createElement('div');
    banner.className = 'alert alert-success alert-dismissible fade show mt-3';
    banner.innerHTML = `
        <i class="fas fa-bolt me-2"></i>
        <strong>Lightning fast!</strong> Config loaded from Redis cache in &lt;100ms
        <small class="d-block mt-1">Generated: ${new Date(generatedAt).toLocaleString()}</small>
        <button type="button" class="btn-close" data-bs-dismiss="alert"></button>
    `;
    const host = document.querySelector('.container > .row > .col-lg-10');
    host.insertBefore(banner, host.firstChild);
    setTimeout(() => {
        // Fade out first, then remove once the transition has finished.
        banner.classList.remove('show');
        setTimeout(() => banner.remove(), 150);
    }, 5000);
}
// Generate a configuration and show it in the preview modal without
// downloading. raw-cidr responses arrive as plain text with cache metadata
// in response headers; other app types return a JSON envelope with the
// config, stats and cache flags.
async function previewConfiguration() {
    const formData = getFormData();
    if (!formData) {
        showResult('Please select at least one country to continue', 'warning');
        return;
    }
    showProgress();
    try {
        const endpoint = formData.app_type === 'raw-cidr'
            ? '/api/generate/raw'
            : '/api/generate/preview';
        const response = await fetch(endpoint, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify(formData)
        });
        hideProgress();
        if (!response.ok) {
            // Tolerate non-JSON error bodies (e.g. proxy HTML error pages).
            const errorData = await response.json().catch(() => ({ error: 'Unknown error' }));
            showResult('Error: ' + (errorData.error || 'Request failed'), 'danger');
            return;
        }
        if (formData.app_type === 'raw-cidr') {
            const text = await response.text();
            const fromCache = response.headers.get('X-From-Cache') === 'true';
            const generatedAt = response.headers.get('X-Generated-At');
            const contentDisposition = response.headers.get('Content-Disposition');
            let filename = 'blocklist.txt';
            if (contentDisposition) {
                // BUGFIX: the old pattern /filename="?(.+)"?/ was greedy, so a
                // quoted filename kept its trailing quote ('foo.txt"'). Stop the
                // capture at the closing quote or a parameter separator.
                const matches = /filename="?([^";]+)"?/.exec(contentDisposition);
                if (matches) filename = matches[1];
            }
            document.getElementById('previewContent').textContent = text;
            const cacheIndicator = document.getElementById('cacheIndicator');
            if (fromCache) {
                cacheIndicator.innerHTML = '<span class="badge bg-success ms-2"><i class="fas fa-bolt"></i> From Cache</span>';
                showCacheBadge(true, generatedAt);
            } else {
                cacheIndicator.innerHTML = '<span class="badge bg-info ms-2"><i class="fas fa-sync"></i> Fresh</span>';
            }
            // Remember the result so the modal's download button can reuse it.
            window.lastGeneratedConfig = text;
            window.lastGeneratedFilename = filename;
            const modal = new bootstrap.Modal(document.getElementById('previewModal'));
            modal.show();
        } else {
            const result = await response.json();
            if (result.success) {
                document.getElementById('previewContent').textContent = result.config;
                const cacheIndicator = document.getElementById('cacheIndicator');
                if (result.from_cache) {
                    cacheIndicator.innerHTML = '<span class="badge bg-success ms-2"><i class="fas fa-bolt"></i> From Cache</span>';
                    showCacheBadge(true, result.generated_at);
                } else {
                    cacheIndicator.innerHTML = '<span class="badge bg-info ms-2"><i class="fas fa-sync"></i> Fresh</span>';
                }
                if (result.stats) {
                    const statsText = `${result.stats.countries} countries, ${result.stats.total_networks.toLocaleString()} networks`;
                    document.getElementById('previewStats').textContent = statsText;
                }
                window.lastGeneratedConfig = result.config;
                window.currentStats = result.stats;
                const modal = new bootstrap.Modal(document.getElementById('previewModal'));
                modal.show();
            } else {
                showResult(result.error || 'An error occurred while generating the preview', 'danger');
            }
        }
    } catch (error) {
        hideProgress();
        showResult('Network error: ' + error.message, 'danger');
        console.error('Preview error:', error);
    }
}
// Close the preview modal (if open) and download the configuration using
// the current form selection.
async function downloadFromPreview() {
    const payload = getFormData();
    if (!payload) {
        showResult('Please select at least one country', 'warning');
        return;
    }
    const openModal = bootstrap.Modal.getInstance(document.getElementById('previewModal'));
    if (openModal) {
        openModal.hide();
    }
    await downloadConfiguration(payload);
}

276
static/js/cache.js Normal file
View File

@@ -0,0 +1,276 @@
// Fetch Redis cache stats and SQLite cache-database stats in parallel and
// render them as stat cards inside #cacheStatsContent. Shows a spinner
// while loading; Redis being unavailable is a hard failure for the panel,
// while a missing SQLite database only degrades the second section.
async function loadCacheStats() {
    const container = document.getElementById('cacheStatsContent');
    if (!container) return;
    // Placeholder spinner while both requests are in flight.
    container.innerHTML = `
        <div class="text-center py-3">
            <div class="spinner-border spinner-border-sm text-primary" role="status"></div>
            <span class="ms-2">Loading statistics...</span>
        </div>
    `;
    try {
        // Query both backends concurrently.
        const [cacheResponse, sqliteResponse] = await Promise.all([
            fetch('/api/cache/status'),
            fetch('/api/database/sqlite/status')
        ]);
        const cacheData = await cacheResponse.json();
        const sqliteData = await sqliteResponse.json();
        if (!cacheData.success) {
            container.innerHTML = `<div class="alert alert-warning mb-0">Redis cache unavailable: ${cacheData.error || 'Unknown error'}</div>`;
            return;
        }
        const stats = cacheData.stats || {};
        const health = cacheData.health || {};
        // Redis summary cards: key counts, cache size and server memory.
        let html = `
            <h6 class="mb-3"><i class="fas fa-bolt text-warning me-2"></i>Redis Cache</h6>
            <div class="row g-3 mb-4">
                <div class="col-md-3">
                    <div class="text-center p-3 bg-light rounded">
                        <h4 class="mb-1">${stats.country_keys || 0}</h4>
                        <small class="text-muted">Country Keys</small>
                    </div>
                </div>
                <div class="col-md-3">
                    <div class="text-center p-3 bg-light rounded">
                        <h4 class="mb-1">${stats.config_keys || 0}</h4>
                        <small class="text-muted">Config Keys</small>
                    </div>
                </div>
                <div class="col-md-3">
                    <div class="text-center p-3 bg-light rounded">
                        <h4 class="mb-1">${stats.total_size_mb || 0} MB</h4>
                        <small class="text-muted">Cache Size</small>
                    </div>
                </div>
                <div class="col-md-3">
                    <div class="text-center p-3 bg-light rounded">
                        <h4 class="mb-1">${health.memory_used_mb || 0} MB</h4>
                        <small class="text-muted">Memory Used</small>
                    </div>
                </div>
            </div>
        `;
        // SQLite section is optional — render stats only when the file exists.
        if (sqliteData.success && sqliteData.exists) {
            const modifiedDate = new Date(sqliteData.modified).toLocaleString();
            html += `
                <h6 class="mb-3"><i class="fas fa-database text-primary me-2"></i>SQLite Cache Database</h6>
                <div class="row g-3 mb-3">
                    <div class="col-md-3">
                        <div class="text-center p-3 bg-light rounded">
                            <h4 class="mb-1">${sqliteData.total_countries || 0}</h4>
                            <small class="text-muted">Countries</small>
                        </div>
                    </div>
                    <div class="col-md-3">
                        <div class="text-center p-3 bg-light rounded">
                            <h4 class="mb-1">${(sqliteData.total_networks || 0).toLocaleString()}</h4>
                            <small class="text-muted">Total Networks</small>
                        </div>
                    </div>
                    <div class="col-md-3">
                        <div class="text-center p-3 bg-light rounded">
                            <h4 class="mb-1">${sqliteData.file_size_mb || 0} MB</h4>
                            <small class="text-muted">Database Size</small>
                        </div>
                    </div>
                    <div class="col-md-3">
                        <div class="text-center p-3 bg-light rounded">
                            <small class="text-muted d-block mb-1">Last Modified</small>
                            <small><strong>${modifiedDate}</strong></small>
                        </div>
                    </div>
                </div>
            `;
            // Optional leaderboard of the largest country datasets.
            if (sqliteData.top_countries && sqliteData.top_countries.length > 0) {
                html += `
                    <div class="alert alert-info mb-0">
                        <strong><i class="fas fa-star me-1"></i>Top countries:</strong>
                        ${sqliteData.top_countries.map(c =>
                            `<span class="badge bg-secondary ms-2">${c.code}: ${c.networks.toLocaleString()}</span>`
                        ).join('')}
                    </div>
                `;
            }
        } else {
            html += `
                <div class="alert alert-warning mb-0">
                    <i class="fas fa-exclamation-triangle me-2"></i>
                    SQLite cache database not available
                </div>
            `;
        }
        container.innerHTML = html;
    } catch (error) {
        // Network/parse failures replace the panel with an error alert.
        console.error('Error loading cache stats:', error);
        container.innerHTML = `
            <div class="alert alert-danger mb-0">
                <i class="fas fa-times-circle me-2"></i>
                Failed to load statistics: ${error.message}
            </div>
        `;
    }
}
// Ask the user to confirm, then flush the entire Redis cache via the API
// and refresh the on-page statistics on success.
async function flushCache() {
    const confirmed = await showConfirmModal(
        'Flush Redis Cache',
        'Are you sure you want to flush ALL Redis cache?<br><br>' +
        '<strong>This will delete:</strong><br>' +
        '• All cached country data<br>' +
        '• All cached configurations<br>' +
        '• Force regeneration for future requests<br><br>' +
        '<span class="text-danger">This action cannot be undone!</span>'
    );
    if (!confirmed) {
        return;
    }
    try {
        showFlushingIndicator();
        const reply = await fetch('/api/cache/flush', {
            method: 'POST',
            headers: {'Content-Type': 'application/json'}
        });
        const payload = await reply.json();
        hideFlushingIndicator();
        if (!payload.success) {
            showToast('danger', 'Error', 'Failed to flush cache: ' + (payload.error || 'Unknown error'));
            return;
        }
        showToast('success', 'Cache Flushed', 'All Redis cache has been cleared successfully!');
        loadCacheStats();
    } catch (error) {
        hideFlushingIndicator();
        showToast('danger', 'Error', 'Network error: ' + error.message);
    }
}
// Show a Bootstrap confirmation modal and resolve a Promise with the
// user's choice: true when the confirm button is clicked, false when the
// modal is dismissed any other way (close button, Cancel, backdrop is
// disabled via data-bs-backdrop="static").
//
// NOTE: clicking Confirm calls modal.hide() and then resolve(true); the
// subsequent 'hidden.bs.modal' handler also calls resolve(false), but that
// second call is a harmless no-op because a Promise settles only once.
function showConfirmModal(title, message) {
    return new Promise((resolve) => {
        // Unique id so multiple concurrent modals never collide.
        const modalId = 'confirmModal_' + Date.now();
        const modalHtml = `
            <div class="modal fade" id="${modalId}" tabindex="-1" data-bs-backdrop="static">
                <div class="modal-dialog modal-dialog-centered">
                    <div class="modal-content">
                        <div class="modal-header bg-warning text-dark">
                            <h5 class="modal-title">
                                <i class="fas fa-exclamation-triangle me-2"></i>${title}
                            </h5>
                            <button type="button" class="btn-close" data-bs-dismiss="modal"></button>
                        </div>
                        <div class="modal-body">
                            ${message}
                        </div>
                        <div class="modal-footer">
                            <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">
                                <i class="fas fa-times me-1"></i>Cancel
                            </button>
                            <button type="button" class="btn btn-danger" id="confirmBtn">
                                <i class="fas fa-trash me-1"></i>Flush Cache
                            </button>
                        </div>
                    </div>
                </div>
            </div>
        `;
        document.body.insertAdjacentHTML('beforeend', modalHtml);
        const modalEl = document.getElementById(modalId);
        const modal = new bootstrap.Modal(modalEl);
        modalEl.querySelector('#confirmBtn').addEventListener('click', () => {
            modal.hide();
            resolve(true);
        });
        // Fires on any dismissal; also removes the modal node from the DOM.
        modalEl.addEventListener('hidden.bs.modal', () => {
            modalEl.remove();
            resolve(false);
        });
        modal.show();
    });
}
// Overlay a centered "Flushing Cache..." card on top of the page while the
// flush request is in flight. Removed again by hideFlushingIndicator().
function showFlushingIndicator() {
    const overlay = document.createElement('div');
    overlay.id = 'flushingIndicator';
    overlay.className = 'position-fixed top-50 start-50 translate-middle';
    overlay.style.zIndex = '9999';
    overlay.innerHTML = `
        <div class="card shadow-lg">
            <div class="card-body text-center p-4">
                <div class="spinner-border text-warning mb-3" role="status" style="width: 3rem; height: 3rem;">
                    <span class="visually-hidden">Flushing...</span>
                </div>
                <h5>Flushing Cache...</h5>
                <p class="text-muted mb-0">Please wait</p>
            </div>
        </div>
    `;
    document.body.appendChild(overlay);
}
// Remove the "Flushing Cache..." overlay if it is present (no-op otherwise).
function hideFlushingIndicator() {
    const overlay = document.getElementById('flushingIndicator');
    if (overlay !== null) {
        overlay.remove();
    }
}
// Pop a Bootstrap toast in the top-right corner. `type` selects the colour
// ('success' | 'danger', anything else = warning); the toast auto-dismisses
// after five seconds and cleans its wrapper out of the DOM.
function showToast(type, title, message) {
    const toastId = 'toast_' + Date.now();
    const colours = { success: 'bg-success', danger: 'bg-danger' };
    const bgClass = colours[type] || 'bg-warning';
    const iconName = type === 'success' ? 'check-circle' : 'exclamation-circle';
    const toastHtml = `
        <div class="position-fixed top-0 end-0 p-3" style="z-index: 9999">
            <div id="${toastId}" class="toast ${bgClass} text-white" role="alert">
                <div class="toast-header ${bgClass} text-white">
                    <i class="fas fa-${iconName} me-2"></i>
                    <strong class="me-auto">${title}</strong>
                    <button type="button" class="btn-close btn-close-white" data-bs-dismiss="toast"></button>
                </div>
                <div class="toast-body">
                    ${message}
                </div>
            </div>
        </div>
    `;
    document.body.insertAdjacentHTML('beforeend', toastHtml);
    const toastEl = document.getElementById(toastId);
    new bootstrap.Toast(toastEl, { delay: 5000 }).show();
    // Remove the positioning wrapper (the toast's parent) once hidden.
    toastEl.addEventListener('hidden.bs.toast', () => {
        toastEl.parentElement.remove();
    });
}
// Legacy shim: route old showAlert() calls through the toast system.
function showAlert(type, message) {
    const title = type === 'success' ? 'Success' : 'Error';
    showToast(type, title, message);
}
// Lazily load cache statistics each time the collapsible stats panel is
// expanded, so the page does not hit the API until the user opens it.
document.addEventListener('DOMContentLoaded', function() {
    const statsPanel = document.getElementById('cacheStatsPanel');
    if (statsPanel) {
        statsPanel.addEventListener('shown.bs.collapse', function() {
            loadCacheStats();
        });
    }
});

197
static/js/progress.js Normal file
View File

@@ -0,0 +1,197 @@
// Handle for the active progress-polling timer (null when idle).
let progressInterval = null;
// Begin polling /api/progress every 500 ms, replacing any earlier timer.
// Polling stops automatically once the backend reports no active job.
function startProgressPolling() {
    if (progressInterval !== null) {
        clearInterval(progressInterval);
    }
    progressInterval = setInterval(async () => {
        try {
            const reply = await fetch('/api/progress');
            const status = await reply.json();
            if (!status.active) {
                stopProgressPolling();
                return;
            }
            updateProgressUI(status.message, status.progress, status.total);
        } catch (error) {
            // Keep polling on transient errors; just log them.
            console.error('Progress polling error:', error);
        }
    }, 500);
}
// Cancel the polling timer; safe to call when nothing is polling.
function stopProgressPolling() {
    if (!progressInterval) {
        return;
    }
    clearInterval(progressInterval);
    progressInterval = null;
}
// Render one progress update: fill the bar to progress/total percent and
// show the backend's status message.
function updateProgressUI(message, progress, total) {
    const section = document.getElementById('progressSection');
    const bar = section.querySelector('.progress-bar');
    const messageEl = document.getElementById('progressMessage');
    const percentEl = document.getElementById('progressPercentage');
    // Guard against division by zero when total is not yet known.
    const pct = total > 0 ? Math.round((progress / total) * 100) : 0;
    bar.style.width = pct + '%';
    bar.setAttribute('aria-valuenow', pct);
    if (percentEl) {
        percentEl.textContent = pct + '%';
    }
    if (messageEl) {
        messageEl.textContent = message;
    }
}
// Reset and reveal the progress UI, hide any previous result, disable the
// generate button, and start polling the backend for updates.
function showProgress() {
    const section = document.getElementById('progressSection');
    const bar = section.querySelector('.progress-bar');
    const messageEl = document.getElementById('progressMessage');
    const percentEl = document.getElementById('progressPercentage');
    bar.style.width = '0%';
    if (percentEl) {
        percentEl.textContent = '0%';
    }
    if (messageEl) {
        messageEl.textContent = 'Initializing...';
    }
    document.getElementById('resultSection').style.display = 'none';
    section.style.display = 'block';
    document.getElementById('generateBtn').disabled = true;
    startProgressPolling();
}
// Hide the progress UI, re-enable the generate button, and stop polling.
function hideProgress() {
    document.getElementById('progressSection').style.display = 'none';
    document.getElementById('generateBtn').disabled = false;
    stopProgressPolling();
}
// Display a result banner of the given Bootstrap contextual type
// ('success' | 'danger' | 'warning' | 'info'); success banners auto-hide
// after five seconds.
function showResult(message, type = 'danger') {
    const section = document.getElementById('resultSection');
    const messageEl = document.getElementById('resultMessage');
    const alertBox = section.querySelector('.alert');
    const icons = {
        'success': 'check-circle',
        'danger': 'exclamation-circle',
        'warning': 'exclamation-triangle',
        'info': 'info-circle'
    };
    const icon = icons[type] || 'info-circle';
    alertBox.className = `alert alert-${type}`;
    messageEl.innerHTML = `<i class="fas fa-${icon} me-2"></i>${message}`;
    section.style.display = 'block';
    if (type === 'success') {
        setTimeout(() => {
            section.style.display = 'none';
        }, 5000);
    }
}
// Wire the generation form: submitting it triggers an AJAX download of the
// configuration instead of performing a normal form POST.
document.addEventListener('DOMContentLoaded', function() {
    const form = document.getElementById('generateForm');
    if (form) {
        form.addEventListener('submit', async function(e) {
            e.preventDefault();
            const formData = getFormData();
            if (!formData) {
                showResult('Please select at least one country to continue', 'warning');
                return;
            }
            await downloadConfiguration(formData);
        });
    }
});
// Generate the configuration described by `formData` and save it through
// the browser's download mechanism (blob + temporary object URL). Shows
// progress while the backend works and a result banner when done.
async function downloadConfiguration(formData) {
    showProgress();
    try {
        const endpoint = formData.app_type === 'raw-cidr'
            ? '/api/generate/raw'
            : '/api/generate';
        const response = await fetch(endpoint, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify(formData)
        });
        hideProgress();
        if (response.ok) {
            const fromCache = response.headers.get('X-From-Cache') === 'true';
            const blob = await response.blob();
            const url = window.URL.createObjectURL(blob);
            const a = document.createElement('a');
            a.href = url;
            const contentDisposition = response.headers.get('Content-Disposition');
            let filename = 'geoblock_config.conf';
            if (contentDisposition) {
                // BUGFIX: the previous pattern /filename="?(.+)"?/ was greedy
                // and kept the trailing quote of quoted filenames. Stop the
                // capture at the closing quote or a parameter separator.
                const filenameMatch = contentDisposition.match(/filename="?([^";]+)"?/);
                if (filenameMatch) {
                    filename = filenameMatch[1];
                }
            }
            a.download = filename;
            document.body.appendChild(a);
            a.click();
            window.URL.revokeObjectURL(url);
            document.body.removeChild(a);
            // BUGFIX: these messages previously contained a literal
            // "$(unknown)" placeholder; interpolate the actual filename.
            if (fromCache) {
                showResult(`<i class="fas fa-bolt"></i> <strong>Lightning fast!</strong> Downloaded from cache: ${filename}`, 'success');
            } else {
                showResult(`Configuration downloaded successfully: ${filename}`, 'success');
            }
        } else {
            // Tolerate non-JSON error bodies instead of throwing into the
            // generic network-error handler below.
            const error = await response.json().catch(() => ({ error: 'Request failed' }));
            showResult(error.error || 'An error occurred during download', 'danger');
        }
    } catch (error) {
        hideProgress();
        showResult('Network error: ' + error.message, 'danger');
    }
}
// Collect the generation form state into an API request payload, or return
// null when no country checkbox is ticked.
function getFormData() {
    const selected = Array.from(
        document.querySelectorAll('input[name="countries"]:checked'),
        (box) => box.value
    );
    if (!selected.length) {
        return null;
    }
    // The cache checkbox may be absent from some pages; default to true.
    const cacheBox = document.getElementById('useCache');
    return {
        countries: selected,
        app_type: document.getElementById('appType').value,
        app_variant: document.getElementById('appVariant').value,
        aggregate: document.getElementById('aggregate').checked,
        use_cache: cacheBox ? cacheBox.checked : true
    };
}

27
systemd/geoip-ban.service Normal file
View File

@@ -0,0 +1,27 @@
[Unit]
Description=GeoIP Ban Configuration Generator
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=www-data
Group=www-data
WorkingDirectory=/opt/geoip_block_generator
ExecStart=/opt/geoip_block_generator/start.sh
Restart=always
RestartSec=10
StandardOutput=journal
StandardError=journal
SyslogIdentifier=geoip-ban
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=strict
ProtectHome=true
ReadWritePaths=/opt/geoip_block_generator/geoip_db /var/log/geoip-ban
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,31 @@
[Unit]
Description=GeoIP Country Pre-Cache Daemon
After=network-online.target redis-server.service
Wants=network-online.target
Requires=redis-server.service
[Service]
Type=simple
User=www-data
Group=www-data
WorkingDirectory=/opt/geoip_block_generator
EnvironmentFile=/opt/geoip_block_generator/.env
ExecStart=/opt/geoip_block_generator/venv/bin/python3 /opt/geoip_block_generator/precache_daemon.py
Restart=always
RestartSec=30
StandardOutput=journal
StandardError=journal
SyslogIdentifier=geoip-precache
PrivateTmp=true
NoNewPrivileges=true
ProtectSystem=strict
ProtectHome=true
ReadWritePaths=/opt/geoip_block_generator/geoip_db
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,34 @@
[Unit]
Description=GeoIP Country Pre-Scanner Daemon
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=www-data
Group=www-data
WorkingDirectory=/opt/geoip_block_generator
EnvironmentFile=/opt/geoip_block_generator/.env
# Python executable
ExecStart=/opt/geoip_block_generator/venv/bin/python /opt/geoip_block_generator/scheduler.py
# Restart policy
Restart=always
RestartSec=10
# Logging
StandardOutput=journal
StandardError=journal
SyslogIdentifier=geoip-scheduler
# Security
PrivateTmp=true
NoNewPrivileges=true
ProtectSystem=strict
ProtectHome=true
ReadWritePaths=/opt/geoip_block_generator/geoip_db
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,31 @@
[Unit]
Description=GeoIP Ban Generator WebApp (Instance %i)
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=www-data
Group=www-data
WorkingDirectory=/opt/geoip_block_generator
# Pass instance port as argument
ExecStart=/opt/geoip_block_generator/start-instance.sh %i
Restart=always
RestartSec=10
# Logging per instance
StandardOutput=append:/var/log/geoip-ban/webapp-%i.log
StandardError=append:/var/log/geoip-ban/webapp-%i-error.log
SyslogIdentifier=geoip-webapp-%i
# Security
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=strict
ProtectHome=true
ReadWritePaths=/opt/geoip_block_generator/geoip_db /var/log/geoip-ban
[Install]
WantedBy=multi-user.target

625
templates/api.html Normal file
View File

@@ -0,0 +1,625 @@
{% extends "base.html" %}
{% block title %}API Documentation - {{ app_name }}{% endblock %}
{% block content %}
<div class="container mt-4">
<div class="row">
<div class="col-lg-10 mx-auto">
<div class="mb-4">
<h2>API Documentation</h2>
<p class="lead">RESTful API for programmatic access to geo-blocking configuration generation.</p>
<div class="alert alert-info">
<i class="fas fa-info-circle me-2"></i>
<strong>Base URL:</strong> <code id="baseUrl"></code>
</div>
</div>
<!-- Endpoint 1: Get Countries -->
<div class="card mb-3">
<div class="card-header api-header-get" onclick="toggleEndpoint('endpoint1')">
<div class="d-flex justify-content-between align-items-center">
<div>
<span class="badge bg-info me-2">GET</span>
<code class="api-path">/api/countries</code>
<span class="ms-3 text-muted">Get available countries</span>
</div>
<i class="fas fa-chevron-down"></i>
</div>
</div>
<div class="card-body collapse" id="endpoint1">
<h6 class="fw-bold">Description</h6>
<p>Returns a list of all available countries with their ISO codes and flag emojis.</p>
<h6 class="fw-bold mt-3">Response Schema</h6>
<pre><code>{
"success": <span class="text-success">true</span>,
"countries": [
{
"code": <span class="text-warning">"CN"</span>,
"name": <span class="text-warning">"China"</span>,
"flag": <span class="text-warning">"🇨🇳"</span>
}
]
}</code></pre>
<h6 class="fw-bold mt-3">Try it out</h6>
<button class="btn btn-sm btn-primary" onclick="tryEndpoint('countries')">
<i class="fas fa-play me-1"></i>Execute
</button>
<div id="response-countries" class="mt-3" style="display:none;">
<h6 class="fw-bold">Response</h6>
<pre><code id="response-countries-body"></code></pre>
</div>
</div>
</div>
<!-- Endpoint 2: Database Status -->
<div class="card mb-3">
<div class="card-header api-header-get" onclick="toggleEndpoint('endpoint2')">
<div class="d-flex justify-content-between align-items-center">
<div>
<span class="badge bg-info me-2">GET</span>
<code class="api-path">/api/database/status</code>
<span class="ms-3 text-muted">Check database status</span>
</div>
<i class="fas fa-chevron-down"></i>
</div>
</div>
<div class="card-body collapse" id="endpoint2">
<h6 class="fw-bold">Description</h6>
<p>Returns the current status of the MaxMind GeoIP database, including last update time and whether an update is needed.</p>
<h6 class="fw-bold mt-3">Response Schema</h6>
<pre><code>{
"success": <span class="text-success">true</span>,
"exists": <span class="text-success">true</span>,
"needs_update": <span class="text-danger">false</span>,
"last_update": <span class="text-warning">"2026-02-10T08:00:00"</span>,
"file_size": <span class="text-info">5242880</span>,
"auto_update": <span class="text-success">true</span>
}</code></pre>
<h6 class="fw-bold mt-3">Try it out</h6>
<button class="btn btn-sm btn-primary" onclick="tryEndpoint('database/status')">
<i class="fas fa-play me-1"></i>Execute
</button>
<div id="response-database-status" class="mt-3" style="display:none;">
<h6 class="fw-bold">Response</h6>
<pre><code id="response-database-status-body"></code></pre>
</div>
</div>
</div>
<!-- Endpoint 3: Update Database -->
<div class="card mb-3">
<div class="card-header api-header-post" onclick="toggleEndpoint('endpoint3')">
<div class="d-flex justify-content-between align-items-center">
<div>
<span class="badge bg-success me-2">POST</span>
<code class="api-path">/api/database/update</code>
<span class="ms-3 text-muted">Update database manually</span>
</div>
<i class="fas fa-chevron-down"></i>
</div>
</div>
<div class="card-body collapse" id="endpoint3">
<h6 class="fw-bold">Description</h6>
<p>Manually triggers a download and update of the MaxMind GeoIP database from configured sources.</p>
<h6 class="fw-bold mt-3">Response Schema</h6>
<pre><code>{
"success": <span class="text-success">true</span>,
"url": <span class="text-warning">"https://github.com/..."</span>,
"size": <span class="text-info">5242880</span>
}</code></pre>
<h6 class="fw-bold mt-3">Try it out</h6>
<button class="btn btn-sm btn-success" onclick="tryEndpoint('database/update', 'POST')">
<i class="fas fa-play me-1"></i>Execute
</button>
<div id="response-database-update" class="mt-3" style="display:none;">
<h6 class="fw-bold">Response</h6>
<pre><code id="response-database-update-body"></code></pre>
</div>
</div>
</div>
<!-- Endpoint 4: Progress Status -->
<div class="card mb-3">
<div class="card-header api-header-get" onclick="toggleEndpoint('endpoint4')">
<div class="d-flex justify-content-between align-items-center">
<div>
<span class="badge bg-info me-2">GET</span>
<code class="api-path">/api/progress</code>
<span class="ms-3 text-muted">Get current generation progress</span>
</div>
<i class="fas fa-chevron-down"></i>
</div>
</div>
<div class="card-body collapse" id="endpoint4">
<h6 class="fw-bold">Description</h6>
<p>Returns the current progress status of any active configuration generation process. Poll this endpoint to monitor long-running operations.</p>
<h6 class="fw-bold mt-3">Response Schema</h6>
<pre><code>{
"active": <span class="text-success">true</span>,
"message": <span class="text-warning">"[1/3] CN: Scanning MaxMind: 234 networks found"</span>,
"progress": <span class="text-info">30</span>,
"total": <span class="text-info">100</span>
}</code></pre>
<h6 class="fw-bold mt-3">Fields</h6>
<table class="table table-sm">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><code>active</code></td>
<td>boolean</td>
<td>Whether a generation process is currently active</td>
</tr>
<tr>
<td><code>message</code></td>
<td>string</td>
<td>Current progress message with detailed status</td>
</tr>
<tr>
<td><code>progress</code></td>
<td>integer</td>
<td>Current progress value (0-100)</td>
</tr>
<tr>
<td><code>total</code></td>
<td>integer</td>
<td>Total progress value (always 100)</td>
</tr>
</tbody>
</table>
<h6 class="fw-bold mt-3">Try it out</h6>
<button class="btn btn-sm btn-primary" onclick="tryEndpoint('progress')">
<i class="fas fa-play me-1"></i>Execute
</button>
<div id="response-progress" class="mt-3" style="display:none;">
<h6 class="fw-bold">Response</h6>
<pre><code id="response-progress-body"></code></pre>
</div>
<h6 class="fw-bold mt-3">Polling Example</h6>
<pre><code>// Poll every 500ms during generation
const pollProgress = setInterval(async () => {
const response = await fetch('/api/progress');
const data = await response.json();
if (data.active) {
console.log(`Progress: ${data.progress}% - ${data.message}`);
} else {
clearInterval(pollProgress);
console.log('Generation complete!');
}
}, 500);</code></pre>
</div>
</div>
<!-- Endpoint 5: Generate Preview -->
<div class="card mb-3">
<div class="card-header api-header-post" onclick="toggleEndpoint('endpoint5')">
<div class="d-flex justify-content-between align-items-center">
<div>
<span class="badge bg-success me-2">POST</span>
<code class="api-path">/api/generate/preview</code>
<span class="ms-3 text-muted">Preview configuration (JSON response)</span>
</div>
<i class="fas fa-chevron-down"></i>
</div>
</div>
<div class="card-body collapse" id="endpoint5">
<h6 class="fw-bold">Description</h6>
<p>Generates configuration and returns it as JSON (instead of file download). Perfect for previewing or integrating into other applications.</p>
<h6 class="fw-bold mt-3">Request Body</h6>
<pre><code>{
"countries": [<span class="text-warning">"CN"</span>, <span class="text-warning">"RU"</span>],
"app_type": <span class="text-warning">"nginx"</span>,
"app_variant": <span class="text-warning">"map"</span>,
"aggregate": <span class="text-success">true</span>,
"use_cache": <span class="text-success">true</span>
}</code></pre>
<h6 class="fw-bold mt-3">Parameters</h6>
<table class="table table-sm">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Required</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><code>countries</code></td>
<td>array</td>
<td><span class="badge bg-danger">required</span></td>
<td>List of ISO 3166-1 alpha-2 country codes</td>
</tr>
<tr>
<td><code>app_type</code></td>
<td>string</td>
<td><span class="badge bg-danger">required</span></td>
<td>One of: <code>nginx</code>, <code>apache</code>, <code>haproxy</code>, <code>raw-cidr</code></td>
</tr>
<tr>
<td><code>app_variant</code></td>
<td>string</td>
<td><span class="badge bg-danger">required</span></td>
<td>Configuration style (depends on app_type)</td>
</tr>
<tr>
<td><code>aggregate</code></td>
<td>boolean</td>
<td><span class="badge bg-secondary">optional</span></td>
<td>Aggregate IP networks to reduce count (default: true)</td>
</tr>
<tr>
<td><code>use_cache</code></td>
<td>boolean</td>
<td><span class="badge bg-secondary">optional</span></td>
<td>Use Redis cache if available (default: true). Set to <code>false</code> to force fresh data from SQLite/MaxMind</td>
</tr>
</tbody>
</table>
<h6 class="fw-bold mt-3">Response Schema</h6>
<pre><code>{
"success": <span class="text-success">true</span>,
"config": <span class="text-warning">"# Nginx Map Module Configuration\n..."</span>,
"stats": {
"countries": <span class="text-info">2</span>,
"total_networks": <span class="text-info">4567</span>,
"per_country": {
"CN": <span class="text-info">2834</span>,
"RU": <span class="text-info">1733</span>
}
},
"from_cache": <span class="text-success">true</span>,
"cache_type": <span class="text-warning">"redis"</span>,
"generated_at": <span class="text-warning">"2026-02-16T10:30:00"</span>
}</code></pre>
<h6 class="fw-bold mt-3">Response Fields</h6>
<table class="table table-sm">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><code>from_cache</code></td>
<td>boolean</td>
<td>Whether the config was served from cache or freshly generated</td>
</tr>
<tr>
<td><code>cache_type</code></td>
<td>string</td>
<td><code>redis</code> (from Redis cache) or <code>sqlite</code> (from SQLite/fresh scan)</td>
</tr>
<tr>
<td><code>generated_at</code></td>
<td>string</td>
<td>ISO 8601 timestamp when the config was generated</td>
</tr>
</tbody>
</table>
<h6 class="fw-bold mt-3">cURL Examples</h6>
<p class="mb-2"><strong>With cache (default):</strong></p>
<pre><code>curl -X POST <span id="curlUrl1"></span>/api/generate/preview \
-H "Content-Type: application/json" \
-d '{
"countries": ["CN", "RU"],
"app_type": "nginx",
"app_variant": "map",
"aggregate": true,
"use_cache": true
}' | jq .</code></pre>
<p class="mb-2 mt-3"><strong>Force fresh data (bypass cache):</strong></p>
<pre><code>curl -X POST <span id="curlUrl1b"></span>/api/generate/preview \
-H "Content-Type: application/json" \
-d '{
"countries": ["CN", "RU"],
"app_type": "nginx",
"app_variant": "map",
"aggregate": true,
"use_cache": false
}' | jq .</code></pre>
</div>
</div>
<!-- Endpoint 6: Generate Raw CIDR -->
<div class="card mb-3">
<div class="card-header api-header-post" onclick="toggleEndpoint('endpoint6')">
<div class="d-flex justify-content-between align-items-center">
<div>
<span class="badge bg-success me-2">POST</span>
<code class="api-path">/api/generate/raw</code>
<span class="ms-3 text-muted">Generate raw CIDR blocklist</span>
</div>
<i class="fas fa-chevron-down"></i>
</div>
</div>
<div class="card-body collapse" id="endpoint6">
<h6 class="fw-bold">Description</h6>
<p>Generates a raw CIDR blocklist without application-specific configuration. Perfect for iptables, fail2ban, or custom implementations.</p>
<h6 class="fw-bold mt-3">Request Body</h6>
<pre><code>{
"countries": [<span class="text-warning">"CN"</span>, <span class="text-warning">"RU"</span>],
"app_variant": <span class="text-warning">"txt"</span>,
"aggregate": <span class="text-success">true</span>,
"use_cache": <span class="text-success">true</span>
}</code></pre>
<h6 class="fw-bold mt-3">Parameters</h6>
<table class="table table-sm">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Required</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><code>countries</code></td>
<td>array</td>
<td><span class="badge bg-danger">required</span></td>
<td>List of ISO 3166-1 alpha-2 country codes</td>
</tr>
<tr>
<td><code>app_variant</code></td>
<td>string</td>
<td><span class="badge bg-secondary">optional</span></td>
<td>Output format: <code>txt</code> (default) or <code>csv</code></td>
</tr>
<tr>
<td><code>aggregate</code></td>
<td>boolean</td>
<td><span class="badge bg-secondary">optional</span></td>
<td>Aggregate IP networks (default: true)</td>
</tr>
<tr>
<td><code>use_cache</code></td>
<td>boolean</td>
<td><span class="badge bg-secondary">optional</span></td>
<td>Use Redis cache if available (default: true)</td>
</tr>
</tbody>
</table>
<h6 class="fw-bold mt-3">Response</h6>
<p>Returns plain text file with CIDR blocks (one per line) or CSV with CIDR and country columns.</p>
<h6 class="fw-bold mt-3">Response Headers</h6>
<table class="table table-sm">
<thead>
<tr>
<th>Header</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><code>X-From-Cache</code></td>
<td><code>true</code> or <code>false</code> - indicates if served from Redis</td>
</tr>
<tr>
<td><code>X-Cache-Type</code></td>
<td><code>redis</code> or <code>sqlite</code> - data source type</td>
</tr>
<tr>
<td><code>X-Generated-At</code></td>
<td>Timestamp when config was generated</td>
</tr>
</tbody>
</table>
<h6 class="fw-bold mt-3">cURL Examples</h6>
<p class="mb-2"><strong>With cache (faster):</strong></p>
<pre><code>curl -X POST <span id="curlUrl2"></span>/api/generate/raw \
-H "Content-Type: application/json" \
-d '{
"countries": ["CN", "RU"],
"app_variant": "txt",
"aggregate": true,
"use_cache": true
}' \
-o blocklist.txt</code></pre>
<p class="mb-2 mt-3"><strong>Force fresh data (slower but guaranteed up-to-date):</strong></p>
<pre><code>curl -X POST <span id="curlUrl2b"></span>/api/generate/raw \
-H "Content-Type: application/json" \
-d '{
"countries": ["CN", "RU"],
"app_variant": "txt",
"aggregate": true,
"use_cache": false
}' \
-o blocklist_fresh.txt</code></pre>
</div>
</div>
<!-- Endpoint 7: Generate Configuration -->
<div class="card mb-3">
<div class="card-header api-header-post" onclick="toggleEndpoint('endpoint7')">
<div class="d-flex justify-content-between align-items-center">
<div>
<span class="badge bg-success me-2">POST</span>
<code class="api-path">/api/generate</code>
<span class="ms-3 text-muted">Generate application configuration</span>
</div>
<i class="fas fa-chevron-down"></i>
</div>
</div>
<div class="card-body collapse" id="endpoint7">
<h6 class="fw-bold">Description</h6>
<p>Generates application-specific geo-blocking configuration for Nginx, Apache, or HAProxy and returns it as a downloadable file.</p>
<h6 class="fw-bold mt-3">Request Body</h6>
<pre><code>{
"countries": [<span class="text-warning">"CN"</span>, <span class="text-warning">"RU"</span>],
"app_type": <span class="text-warning">"nginx"</span>,
"app_variant": <span class="text-warning">"map"</span>,
"aggregate": <span class="text-success">true</span>,
"use_cache": <span class="text-success">true</span>
}</code></pre>
<h6 class="fw-bold mt-3">Parameters</h6>
<table class="table table-sm">
<thead>
<tr>
<th>Name</th>
<th>Type</th>
<th>Required</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><code>countries</code></td>
<td>array</td>
<td><span class="badge bg-danger">required</span></td>
<td>List of ISO 3166-1 alpha-2 country codes</td>
</tr>
<tr>
<td><code>app_type</code></td>
<td>string</td>
<td><span class="badge bg-danger">required</span></td>
<td>One of: <code>nginx</code>, <code>apache</code>, <code>haproxy</code></td>
</tr>
<tr>
<td><code>app_variant</code></td>
<td>string</td>
<td><span class="badge bg-danger">required</span></td>
<td>Configuration style (depends on app_type)</td>
</tr>
<tr>
<td><code>aggregate</code></td>
<td>boolean</td>
<td><span class="badge bg-secondary">optional</span></td>
<td>Aggregate IP networks (default: true)</td>
</tr>
<tr>
<td><code>use_cache</code></td>
<td>boolean</td>
<td><span class="badge bg-secondary">optional</span></td>
<td>Use Redis cache if available (default: true)</td>
</tr>
</tbody>
</table>
<h6 class="fw-bold mt-3">Available Variants</h6>
<ul>
<li><strong>nginx:</strong> <code>geo</code>, <code>map</code>, <code>deny</code></li>
<li><strong>apache:</strong> <code>22</code> (Apache 2.2), <code>24</code> (Apache 2.4)</li>
<li><strong>haproxy:</strong> <code>acl</code>, <code>lua</code></li>
</ul>
<h6 class="fw-bold mt-3">Response</h6>
<p>Returns configuration file as <code>text/plain</code> with Content-Disposition header for download.</p>
<h6 class="fw-bold mt-3">Response Headers</h6>
<table class="table table-sm">
<thead>
<tr>
<th>Header</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><code>X-From-Cache</code></td>
<td><code>true</code> or <code>false</code></td>
</tr>
<tr>
<td><code>X-Cache-Type</code></td>
<td><code>redis</code> or <code>sqlite</code></td>
</tr>
<tr>
<td><code>X-Generated-At</code></td>
<td>ISO 8601 timestamp</td>
</tr>
</tbody>
</table>
<h6 class="fw-bold mt-3">Cache Behavior</h6>
<div class="alert alert-info">
<strong>With Redis enabled:</strong>
<ul class="mb-0">
<li><code>use_cache: true</code> - Check Redis first, return cached config if available (fast, &lt;1s)</li>
<li><code>use_cache: false</code> - Bypass Redis, fetch from SQLite cache or scan MaxMind (slower, 5-30s)</li>
</ul>
</div>
<h6 class="fw-bold mt-3">cURL Examples</h6>
<p class="mb-2"><strong>With cache (recommended for production):</strong></p>
<pre><code>curl -X POST <span id="curlUrl3"></span>/api/generate \
-H "Content-Type: application/json" \
-d '{
"countries": ["CN", "RU"],
"app_type": "nginx",
"app_variant": "map",
"aggregate": true,
"use_cache": true
}' \
-o geoblock.conf
# Check if it was cached:
curl -I -X POST <span id="curlUrl3b"></span>/api/generate \
-H "Content-Type: application/json" \
-d '{"countries":["CN"],"app_type":"nginx","app_variant":"map"}' \
| grep "X-From-Cache"</code></pre>
<p class="mb-2 mt-3"><strong>Force fresh scan (for testing or updates):</strong></p>
<pre><code>curl -X POST <span id="curlUrl3c"></span>/api/generate \
-H "Content-Type: application/json" \
-d '{
"countries": ["CN", "RU"],
"app_type": "nginx",
"app_variant": "map",
"aggregate": true,
"use_cache": false
}' \
-o geoblock_fresh.conf</code></pre>
</div>
</div>
</div>
</div>
</div>
{% endblock %}
{% block scripts %}
<script src="{{ url_for('static', filename='js/api.js') }}?v={{ js_hash }}"></script>
{% endblock %}

<!-- File boundary: templates/base.html (new file, 47 lines) -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{% block title %}{{ app_name }}{% endblock %}</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
<link rel="stylesheet" href="{{ url_for('static', filename='css/style.css') }}?v={{ css_hash }}">
</head>
<body>
<nav class="navbar navbar-light bg-light border-bottom">
<div class="container">
<a class="navbar-brand" href="{{ logo_link }}">
{% if logo_url %}
<img src="{{ logo_url }}" alt="{{ app_name }}" height="30">
{% else %}
{{ app_name }}
{% endif %}
</a>
<div>
<a href="/" class="btn btn-sm btn-outline-secondary me-2">Home</a>
<a href="/api-docs" class="btn btn-sm btn-outline-primary">API Docs</a>
</div>
</div>
</nav>
<main>
{% block content %}{% endblock %}
</main>
<footer class="footer mt-auto">
<div class="container text-center">
<span class="text-muted">{{ footer_text }}</span>
{% if footer_link %}
<span class="text-muted mx-2">|</span>
<a href="{{ footer_link }}">{{ footer_link_text }}</a>
{% endif %}
</div>
</footer>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
<script src="{{ url_for('static', filename='js/app.js') }}?v={{ js_hash }}"></script>
{% block scripts %}{% endblock %}
</body>
</html>

<!-- File boundary: templates/index.html (new file, 237 lines) -->
{% extends "base.html" %}
{% block content %}
<div class="container mt-4">
<div class="row justify-content-center">
<div class="col-lg-10">
<!-- Database & Redis Status -->
<div class="row g-2 mb-3">
<div class="col-md-8">
<div id="dbStatus" class="alert alert-info mb-0">
<i class="fas fa-database me-2"></i>
<span>Checking database status...</span>
</div>
</div>
<div class="col-md-4">
{% if redis_enabled %}
{% if redis_connected %}
<div class="alert alert-success mb-0">
<i class="fas fa-bolt me-2"></i>Redis Cache: <strong>Active</strong>
</div>
{% else %}
<div class="alert alert-warning mb-0">
<i class="fas fa-exclamation-triangle me-2"></i>Redis: <strong>Offline</strong>
</div>
{% endif %}
{% else %}
<div class="alert alert-secondary mb-0">
<i class="fas fa-bolt me-2"></i>Redis: <strong>Disabled</strong>
</div>
{% endif %}
</div>
</div>
<!-- Cache Stats Panel (Collapsible) -->
{% if redis_enabled and redis_connected %}
<div class="card mb-3">
<div class="card-header bg-light" style="cursor: pointer;" data-bs-toggle="collapse" data-bs-target="#cacheStatsPanel">
<div class="d-flex justify-content-between align-items-center">
<span>
<i class="fas fa-chart-bar me-2"></i>Cache Statistics
</span>
<i class="fas fa-chevron-down"></i>
</div>
</div>
<div id="cacheStatsPanel" class="collapse">
<div class="card-body">
<div id="cacheStatsContent">
<div class="text-center py-3">
<div class="spinner-border spinner-border-sm text-primary" role="status"></div>
<span class="ms-2">Loading cache statistics...</span>
</div>
</div>
<div class="mt-3 border-top pt-3">
<button class="btn btn-sm btn-warning" onclick="flushCache()">
<i class="fas fa-trash me-1"></i>Flush Cache (Redis)
</button>
<button class="btn btn-sm btn-info" onclick="loadCacheStats()">
<i class="fas fa-sync me-1"></i>Refresh Stats
</button>
</div>
</div>
</div>
</div>
{% endif %}
<!-- Main Card -->
<div class="card shadow-sm">
<div class="card-header bg-white">
<h4 class="mb-0">
<i class="fas fa-globe me-2"></i>Generate Geo-Blocking Configuration
</h4>
</div>
<div class="card-body p-4">
<form id="generateForm">
<!-- Country Selection -->
<div class="mb-4">
<label class="form-label fw-bold">
<i class="fas fa-flag me-2"></i>Select Countries to Block
</label>
<div class="row g-1 mb-2" id="countryList">
{% for country in countries %}
<div class="col-xxl-1 col-xl-2 col-lg-2 col-md-3 col-sm-4 col-6">
<div class="form-check form-check-compact">
<input class="form-check-input" type="checkbox"
name="countries" value="{{ country.code }}"
id="country_{{ country.code }}">
<label class="form-check-label" for="country_{{ country.code }}" title="{{ country.name }}">
{{ country.flag }} {{ country.code }}
</label>
</div>
</div>
{% endfor %}
</div>
<div>
<button type="button" class="btn btn-sm btn-outline-primary" onclick="selectAll()">
<i class="fas fa-check-double me-1"></i>Select All
</button>
<button type="button" class="btn btn-sm btn-outline-secondary" onclick="deselectAll()">
<i class="fas fa-times me-1"></i>Deselect All
</button>
</div>
</div>
<!-- Application Type -->
<div class="mb-4">
<label class="form-label fw-bold">
<i class="fas fa-server me-2"></i>Output Format
</label>
<select class="form-select form-select-lg" id="appType" name="app_type" onchange="updateVariants()">
<option value="raw-cidr">Raw CIDR List</option>
<option value="nginx">Nginx</option>
<option value="apache">Apache</option>
<option value="haproxy">HAProxy</option>
</select>
</div>
<!-- Variant Selection -->
<div class="mb-4" id="variantSection">
<label class="form-label fw-bold">
<i class="fas fa-cog me-2"></i>Configuration Style
</label>
<select class="form-select form-select-lg" id="appVariant" name="app_variant" onchange="updateVariantDescription()">
</select>
<div id="variantDescription" class="mt-2" style="display: none;">
</div>
</div>
<!-- Aggregate Option -->
<div class="mb-4">
<label class="form-label fw-bold">
<i class="fas fa-sliders-h me-2"></i>Options
</label>
<div class="aggregate-card">
<div class="form-check form-switch mb-3">
<input class="form-check-input" type="checkbox" role="switch"
id="aggregate" name="aggregate" checked>
<label class="form-check-label" for="aggregate">
<strong>Aggregate IP networks</strong><br>
<small class="text-muted">Combines adjacent networks into larger CIDR blocks for smaller files</small>
</label>
</div>
{% if redis_enabled and redis_connected %}
<div class="form-check form-switch">
<input class="form-check-input" type="checkbox" role="switch"
id="useCache" name="use_cache" checked>
<label class="form-check-label" for="useCache">
<strong><i class="fas fa-bolt text-warning"></i> Use Redis Cache</strong><br>
<small class="text-muted">Instant generation for previously cached configurations (&lt;100ms)</small>
</label>
</div>
{% endif %}
</div>
</div>
<!-- Action Buttons -->
<div class="row g-2">
<div class="col-md-6">
<button type="button" class="btn btn-outline-primary btn-lg w-100" onclick="previewConfiguration()">
<i class="fas fa-eye me-2"></i>Preview
</button>
</div>
<div class="col-md-6">
<button type="submit" class="btn btn-primary btn-lg w-100" id="generateBtn">
<i class="fas fa-download me-2"></i>Download
</button>
</div>
</div>
</form>
<!-- Progress -->
<div id="progressSection" class="mt-3" style="display: none;">
<div class="progress" style="height: 2rem;">
<div class="progress-bar progress-bar-striped progress-bar-animated"
role="progressbar"
style="width: 0%"
aria-valuenow="0"
aria-valuemin="0"
aria-valuemax="100">
<span id="progressPercentage">0%</span>
</div>
</div>
<div class="text-center mt-2">
<small class="text-muted" id="progressMessage">Initializing...</small>
</div>
</div>
<!-- Result -->
<div id="resultSection" class="mt-3" style="display: none;">
<div class="alert" role="alert">
<span id="resultMessage"></span>
</div>
</div>
</div>
</div>
<!-- Preview Modal -->
<div class="modal fade" id="previewModal" tabindex="-1" aria-hidden="true">
<div class="modal-dialog modal-xl modal-dialog-scrollable">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title">
<i class="fas fa-file-code me-2"></i>Configuration Preview
<span id="cacheIndicator"></span>
</h5>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div class="mb-3">
<button class="btn btn-sm btn-outline-secondary" onclick="copyToClipboard()">
<i class="fas fa-copy me-1"></i>Copy to Clipboard
</button>
<span id="previewStats" class="ms-3 text-muted"></span>
</div>
<pre class="mb-0"><code id="previewContent"></code></pre>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
<button type="button" class="btn btn-primary" onclick="downloadFromPreview()">
<i class="fas fa-download me-2"></i>Download
</button>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
{% endblock %}
{% block scripts %}
<script src="{{ url_for('static', filename='js/progress.js') }}?v={{ js_hash }}"></script>
<script src="{{ url_for('static', filename='js/cache.js') }}?v={{ js_hash }}"></script>
{% endblock %}