Compare commits
14 Commits
88a0574e86
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
83c3564bd1 | ||
|
|
45fc1c6d55 | ||
|
|
ab06cbc0b7 | ||
|
|
013a73e02d | ||
|
|
721ad44960 | ||
|
|
b3a16303d2 | ||
|
|
0d9dcecda7 | ||
|
|
49222517e5 | ||
|
|
a02ceaad64 | ||
|
|
1d8071966b | ||
|
|
9ccb1651b6 | ||
|
|
98acbc0119 | ||
|
|
166b55a632 | ||
|
|
a789289330 |
15
.env.example
15
.env.example
@@ -35,9 +35,6 @@ SCAN_TIME=02:00
|
|||||||
SCAN_ON_STARTUP=true
|
SCAN_ON_STARTUP=true
|
||||||
CACHE_MAX_AGE_HOURS=168
|
CACHE_MAX_AGE_HOURS=168
|
||||||
|
|
||||||
# Parallel scanning
|
|
||||||
PARALLEL_WORKERS=8 # 0=auto
|
|
||||||
|
|
||||||
# Redis
|
# Redis
|
||||||
REDIS_HOST=redis
|
REDIS_HOST=redis
|
||||||
REDIS_PORT=6379
|
REDIS_PORT=6379
|
||||||
@@ -50,3 +47,15 @@ REDIS_CACHE_TTL=86400
|
|||||||
PRECACHE_INTERVAL_HOURS=168
|
PRECACHE_INTERVAL_HOURS=168
|
||||||
PRECACHE_CHECK_INTERVAL=3600
|
PRECACHE_CHECK_INTERVAL=3600
|
||||||
PRECACHE_MIN_TTL_HOURS=7
|
PRECACHE_MIN_TTL_HOURS=7
|
||||||
|
|
||||||
|
# MaxMind chunking
|
||||||
|
MAXMIND_CHUNK_TASKS_PER_WORKER=16
|
||||||
|
MAXMIND_CHUNK_MIN=200
|
||||||
|
MAXMIND_CHUNK_MAX=4000
|
||||||
|
|
||||||
|
# cap MaxMind workers per-country
|
||||||
|
MAXMIND_WORKERS_MAX=48
|
||||||
|
MAXMIND_WORKERS_MIN=6
|
||||||
|
|
||||||
|
# Parallel scanning
|
||||||
|
PARALLEL_WORKERS=8 # 0=auto
|
||||||
115
api.py
115
api.py
@@ -139,7 +139,7 @@ def get_countries():
|
|||||||
'countries': config.COMMON_COUNTRIES
|
'countries': config.COMMON_COUNTRIES
|
||||||
})
|
})
|
||||||
|
|
||||||
@api_blueprint.route('/api/cache/status', methods=['GET'])
|
@api_blueprint.route('/api/cache/redis/status', methods=['GET'])
|
||||||
def cache_status():
|
def cache_status():
|
||||||
if not redis_cache:
|
if not redis_cache:
|
||||||
return jsonify({
|
return jsonify({
|
||||||
@@ -148,19 +148,35 @@ def cache_status():
|
|||||||
'message': 'Redis cache is not enabled'
|
'message': 'Redis cache is not enabled'
|
||||||
})
|
})
|
||||||
|
|
||||||
|
health = None
|
||||||
try:
|
try:
|
||||||
health = redis_cache.health_check()
|
health = redis_cache.health_check()
|
||||||
|
except Exception as health_error:
|
||||||
|
print(f"[REDIS] Health check failed: {health_error}", flush=True)
|
||||||
|
health = {'connected': False, 'status': 'disconnected', 'error': str(health_error)}
|
||||||
|
|
||||||
country_keys_count = 0
|
country_keys_count = 0
|
||||||
config_keys_count = 0
|
config_keys_count = 0
|
||||||
total_size_bytes = 0
|
total_size_bytes = 0
|
||||||
|
|
||||||
|
patterns = [
|
||||||
|
("geoban:country:*", "country"),
|
||||||
|
("geoip:config:*", "config"),
|
||||||
|
("geoban:config:*", "config")
|
||||||
|
]
|
||||||
|
|
||||||
|
for pattern, key_type in patterns:
|
||||||
try:
|
try:
|
||||||
pattern_country = "geoban:country:*"
|
|
||||||
cursor = 0
|
cursor = 0
|
||||||
while True:
|
while True:
|
||||||
cursor, keys = redis_cache.redis_client.scan(cursor, match=pattern_country, count=1000)
|
cursor, keys = redis_cache.redis_client.scan(cursor, match=pattern, count=1000)
|
||||||
country_keys_count += len(keys)
|
|
||||||
|
key_count = len(keys)
|
||||||
|
if key_type == "country":
|
||||||
|
country_keys_count += key_count
|
||||||
|
else:
|
||||||
|
config_keys_count += key_count
|
||||||
|
|
||||||
for key in keys:
|
for key in keys:
|
||||||
try:
|
try:
|
||||||
size = redis_cache.redis_client.memory_usage(key)
|
size = redis_cache.redis_client.memory_usage(key)
|
||||||
@@ -168,43 +184,12 @@ def cache_status():
|
|||||||
total_size_bytes += size
|
total_size_bytes += size
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if cursor == 0:
|
if cursor == 0:
|
||||||
break
|
break
|
||||||
|
except Exception as pattern_error:
|
||||||
pattern_config = "geoip:config:*"
|
print(f"[REDIS] Pattern '{pattern}' failed: {pattern_error}", flush=True)
|
||||||
cursor = 0
|
continue
|
||||||
while True:
|
|
||||||
cursor, keys = redis_cache.redis_client.scan(cursor, match=pattern_config, count=1000)
|
|
||||||
config_keys_count += len(keys)
|
|
||||||
for key in keys:
|
|
||||||
try:
|
|
||||||
size = redis_cache.redis_client.memory_usage(key)
|
|
||||||
if size:
|
|
||||||
total_size_bytes += size
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
if cursor == 0:
|
|
||||||
break
|
|
||||||
|
|
||||||
pattern_config_new = "geoban:config:*"
|
|
||||||
cursor = 0
|
|
||||||
while True:
|
|
||||||
cursor, keys = redis_cache.redis_client.scan(cursor, match=pattern_config_new, count=1000)
|
|
||||||
config_keys_count += len(keys)
|
|
||||||
for key in keys:
|
|
||||||
try:
|
|
||||||
size = redis_cache.redis_client.memory_usage(key)
|
|
||||||
if size:
|
|
||||||
total_size_bytes += size
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
if cursor == 0:
|
|
||||||
break
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
print(f"[REDIS] Error counting keys: {e}", flush=True)
|
|
||||||
import traceback
|
|
||||||
traceback.print_exc()
|
|
||||||
|
|
||||||
return jsonify({
|
return jsonify({
|
||||||
'success': True,
|
'success': True,
|
||||||
@@ -215,18 +200,10 @@ def cache_status():
|
|||||||
'config_keys': config_keys_count,
|
'config_keys': config_keys_count,
|
||||||
'total_keys': country_keys_count + config_keys_count,
|
'total_keys': country_keys_count + config_keys_count,
|
||||||
'total_size_mb': round(total_size_bytes / 1024 / 1024, 2),
|
'total_size_mb': round(total_size_bytes / 1024 / 1024, 2),
|
||||||
'memory_used_mb': health.get('memory_used_mb', 0),
|
'memory_used_mb': health.get('memory_used_mb', 0) if isinstance(health, dict) else 0,
|
||||||
'total_keys_in_db': health.get('keys', 0)
|
'total_keys_in_db': health.get('keys', 0) if isinstance(health, dict) else 0
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
except Exception as e:
|
|
||||||
import traceback
|
|
||||||
traceback.print_exc()
|
|
||||||
return jsonify({
|
|
||||||
'success': False,
|
|
||||||
'enabled': True,
|
|
||||||
'error': str(e)
|
|
||||||
}), 500
|
|
||||||
|
|
||||||
@api_blueprint.route('/api/cache/flush', methods=['POST'])
|
@api_blueprint.route('/api/cache/flush', methods=['POST'])
|
||||||
def cache_flush():
|
def cache_flush():
|
||||||
@@ -462,30 +439,17 @@ def generate_raw_cidr():
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
if handler.needs_update():
|
update_progress('Loading networks...', 10, 100)
|
||||||
handler.check_and_update()
|
|
||||||
|
|
||||||
update_progress(f'Loading data for {len(countries)} countries...', 0, 100)
|
|
||||||
|
|
||||||
country_networks = {}
|
country_networks = {}
|
||||||
cache_sources = {}
|
cache_sources = {}
|
||||||
total_countries = len(countries)
|
|
||||||
|
|
||||||
for idx, country in enumerate(countries, 1):
|
for i, country in enumerate(countries):
|
||||||
base_progress = int((idx - 1) / total_countries * 80)
|
update_progress(f'Processing {country}...', 10 + (i * 60 // max(1, len(countries))), 100)
|
||||||
update_progress(f'[{idx}/{total_countries}] Loading {country}...', base_progress, 100)
|
nets, source = get_country_networks_cached(country, use_cache=use_cache)
|
||||||
|
if nets:
|
||||||
networks, source = get_country_networks_cached(country, use_cache=use_cache)
|
country_networks[country] = nets
|
||||||
|
|
||||||
if networks:
|
|
||||||
country_networks[country] = networks
|
|
||||||
cache_sources[country] = source
|
cache_sources[country] = source
|
||||||
next_progress = int(idx / total_countries * 80)
|
|
||||||
update_progress(
|
|
||||||
f'[{idx}/{total_countries}] {country}: {len(networks):,} networks ({source})',
|
|
||||||
next_progress,
|
|
||||||
100
|
|
||||||
)
|
|
||||||
|
|
||||||
if not country_networks:
|
if not country_networks:
|
||||||
clear_progress()
|
clear_progress()
|
||||||
@@ -493,12 +457,7 @@ def generate_raw_cidr():
|
|||||||
|
|
||||||
update_progress('Generating file...', 85, 100)
|
update_progress('Generating file...', 85, 100)
|
||||||
|
|
||||||
if 'txt' in app_type or 'cidr' in app_type or 'newline' in app_type:
|
if 'json' in app_type:
|
||||||
config_text = ConfigGenerator.generate_raw_cidr(country_networks, aggregate=aggregate, redis_ips=None)
|
|
||||||
filename = f"blocklist_{'_'.join(sorted(countries))}.txt"
|
|
||||||
mimetype = 'text/plain'
|
|
||||||
|
|
||||||
elif 'json' in app_type:
|
|
||||||
all_networks = []
|
all_networks = []
|
||||||
for nets in country_networks.values():
|
for nets in country_networks.values():
|
||||||
all_networks.extend(nets)
|
all_networks.extend(nets)
|
||||||
@@ -530,8 +489,10 @@ def generate_raw_cidr():
|
|||||||
mimetype = 'text/csv'
|
mimetype = 'text/csv'
|
||||||
|
|
||||||
else:
|
else:
|
||||||
clear_progress()
|
# TXT / CIDR / newline (default)
|
||||||
return jsonify({'success': False, 'error': f'Unknown format: {app_type}'}), 400
|
config_text = ConfigGenerator.generate_raw_cidr(country_networks, aggregate=aggregate, redis_ips=None)
|
||||||
|
filename = f"blocklist_{'_'.join(sorted(countries))}.txt"
|
||||||
|
mimetype = 'text/plain'
|
||||||
|
|
||||||
total_networks = sum(len(nets) for nets in country_networks.values())
|
total_networks = sum(len(nets) for nets in country_networks.values())
|
||||||
stats = {
|
stats = {
|
||||||
@@ -700,7 +661,7 @@ def generate_config():
|
|||||||
clear_progress()
|
clear_progress()
|
||||||
return jsonify({'success': False, 'error': str(e)}), 500
|
return jsonify({'success': False, 'error': str(e)}), 500
|
||||||
|
|
||||||
@api_blueprint.route('/api/database/sqlite/status', methods=['GET'])
|
@api_blueprint.route('/api/cache/sqlite/status', methods=['GET'])
|
||||||
def sqlite_status():
|
def sqlite_status():
|
||||||
"""Get SQLite cache database statistics"""
|
"""Get SQLite cache database statistics"""
|
||||||
db_path = config.GEOIP_DB_DIR / 'networks_cache.db'
|
db_path = config.GEOIP_DB_DIR / 'networks_cache.db'
|
||||||
|
|||||||
103
app.py
103
app.py
@@ -3,16 +3,16 @@ GeoIP Ban Generator - Web Application
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
from flask import Flask, render_template, request, Response, jsonify
|
from flask import Flask, render_template, request, Response, jsonify
|
||||||
import hashlib
|
from geoip_handler import GeoIPHandler
|
||||||
import os
|
from flask import jsonify, render_template, request
|
||||||
import sqlite3
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
import config
|
|
||||||
from api import api
|
from api import api
|
||||||
from geoip_handler import GeoIPHandler
|
import hashlib
|
||||||
|
import os
|
||||||
|
import sqlite3
|
||||||
|
import config
|
||||||
|
|
||||||
app = Flask(__name__,
|
app = Flask(__name__,
|
||||||
static_folder=str(config.STATIC_DIR),
|
static_folder=str(config.STATIC_DIR),
|
||||||
@@ -23,6 +23,18 @@ app.register_blueprint(api)
|
|||||||
|
|
||||||
handler = GeoIPHandler()
|
handler = GeoIPHandler()
|
||||||
|
|
||||||
|
CACHEABLE_PAGES = {
|
||||||
|
"/api-docs",
|
||||||
|
"/generator",
|
||||||
|
}
|
||||||
|
|
||||||
|
NO_CACHE_PREFIXES = (
|
||||||
|
"/api/",
|
||||||
|
)
|
||||||
|
|
||||||
|
STATIC_PREFIX = "/static/"
|
||||||
|
|
||||||
|
|
||||||
redis_cache = None
|
redis_cache = None
|
||||||
if config.REDIS_ENABLED:
|
if config.REDIS_ENABLED:
|
||||||
try:
|
try:
|
||||||
@@ -105,6 +117,54 @@ def inject_globals():
|
|||||||
'redis_connected': redis_cache.health_check()['connected'] if redis_cache else False,
|
'redis_connected': redis_cache.health_check()['connected'] if redis_cache else False,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def _wants_json():
|
||||||
|
if request.path.startswith("/api/"):
|
||||||
|
return True
|
||||||
|
accept = (request.headers.get("Accept") or "").lower()
|
||||||
|
return "application/json" in accept
|
||||||
|
|
||||||
|
def _render_4xx(code, title, message):
|
||||||
|
payload = {
|
||||||
|
"success": False,
|
||||||
|
"error": title,
|
||||||
|
"message": message,
|
||||||
|
"path": request.path,
|
||||||
|
"status": code,
|
||||||
|
}
|
||||||
|
|
||||||
|
if _wants_json():
|
||||||
|
return jsonify(payload), code
|
||||||
|
|
||||||
|
return render_template(
|
||||||
|
"error.html",
|
||||||
|
status=code,
|
||||||
|
title=title,
|
||||||
|
message=message,
|
||||||
|
path=request.path
|
||||||
|
), code
|
||||||
|
|
||||||
|
@app.errorhandler(400)
|
||||||
|
def bad_request(e):
|
||||||
|
return _render_4xx(400, "Bad Request", "Request is invalid or missing required fields.")
|
||||||
|
|
||||||
|
@app.errorhandler(401)
|
||||||
|
def unauthorized(e):
|
||||||
|
return _render_4xx(401, "Unauthorized", "Authentication is required for this resource.")
|
||||||
|
|
||||||
|
@app.errorhandler(403)
|
||||||
|
def forbidden(e):
|
||||||
|
return _render_4xx(403, "Forbidden", "You don't have permission to access this resource.")
|
||||||
|
|
||||||
|
@app.errorhandler(404)
|
||||||
|
def not_found(e):
|
||||||
|
return _render_4xx(404, "Not Found", "The requested endpoint/page does not exist.")
|
||||||
|
|
||||||
|
@app.errorhandler(405)
|
||||||
|
def method_not_allowed(e):
|
||||||
|
return _render_4xx(405, "Method Not Allowed", "The HTTP method is not allowed for this endpoint.")
|
||||||
|
|
||||||
@app.route('/')
|
@app.route('/')
|
||||||
def index():
|
def index():
|
||||||
"""Main page"""
|
"""Main page"""
|
||||||
@@ -118,6 +178,11 @@ def api_docs():
|
|||||||
"""API documentation page"""
|
"""API documentation page"""
|
||||||
return render_template('api.html')
|
return render_template('api.html')
|
||||||
|
|
||||||
|
@app.route("/generator")
|
||||||
|
def generator():
|
||||||
|
"""Script gwnerator"""
|
||||||
|
return render_template("generator.html")
|
||||||
|
|
||||||
@app.route('/favicon.ico')
|
@app.route('/favicon.ico')
|
||||||
def favicon():
|
def favicon():
|
||||||
return '', 204
|
return '', 204
|
||||||
@@ -218,19 +283,27 @@ def cache_control(max_age: int = None):
|
|||||||
def add_headers(response):
|
def add_headers(response):
|
||||||
"""Add cache control headers based on request path"""
|
"""Add cache control headers based on request path"""
|
||||||
|
|
||||||
if request.path == '/' or request.path.startswith('/api/'):
|
path = request.path
|
||||||
response.headers['Cache-Control'] = 'no-cache, no-store'
|
if path.startswith(STATIC_PREFIX):
|
||||||
|
if "Content-Disposition" in response.headers:
|
||||||
|
del response.headers["Content-Disposition"]
|
||||||
|
|
||||||
elif request.path.startswith('/static/'):
|
|
||||||
if 'Content-Disposition' in response.headers:
|
|
||||||
del response.headers['Content-Disposition']
|
|
||||||
if config.ENABLE_CACHE_BUSTING:
|
if config.ENABLE_CACHE_BUSTING:
|
||||||
response.headers['Cache-Control'] = f'public, max-age={config.CACHE_TTL_SECONDS}, immutable'
|
response.headers["Cache-Control"] = (
|
||||||
|
f"public, max-age={config.CACHE_TTL_SECONDS}, immutable"
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
response.headers['Cache-Control'] = f'public, max-age={config.CACHE_TTL_SECONDS}'
|
response.headers["Cache-Control"] = f"public, max-age={config.CACHE_TTL_SECONDS}"
|
||||||
|
|
||||||
elif request.path == '/api-docs':
|
return response
|
||||||
response.headers['Cache-Control'] = 'public, max-age=300'
|
|
||||||
|
if path in CACHEABLE_PAGES:
|
||||||
|
response.headers["Cache-Control"] = "public, max-age=300"
|
||||||
|
return response
|
||||||
|
|
||||||
|
if path == "/" or any(path.startswith(p) for p in NO_CACHE_PREFIXES):
|
||||||
|
response.headers["Cache-Control"] = "no-cache, no-store"
|
||||||
|
return response
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
|||||||
72
config.py
72
config.py
@@ -189,6 +189,8 @@ COMMON_COUNTRIES = [
|
|||||||
{'code': 'EC', 'name': 'Ecuador'},
|
{'code': 'EC', 'name': 'Ecuador'},
|
||||||
{'code': 'BO', 'name': 'Bolivia'},
|
{'code': 'BO', 'name': 'Bolivia'},
|
||||||
{'code': 'PY', 'name': 'Paraguay'},
|
{'code': 'PY', 'name': 'Paraguay'},
|
||||||
|
{'code': 'NI', 'name': 'Nicaragua'},
|
||||||
|
|
||||||
|
|
||||||
# Caribbean
|
# Caribbean
|
||||||
{'code': 'CU', 'name': 'Cuba'},
|
{'code': 'CU', 'name': 'Cuba'},
|
||||||
@@ -196,6 +198,7 @@ COMMON_COUNTRIES = [
|
|||||||
{'code': 'DO', 'name': 'Dominican Republic'},
|
{'code': 'DO', 'name': 'Dominican Republic'},
|
||||||
{'code': 'JM', 'name': 'Jamaica'},
|
{'code': 'JM', 'name': 'Jamaica'},
|
||||||
{'code': 'TT', 'name': 'Trinidad and Tobago'},
|
{'code': 'TT', 'name': 'Trinidad and Tobago'},
|
||||||
|
{'code': 'UY', 'name': 'Uruguay'},
|
||||||
|
|
||||||
# Other high-risk
|
# Other high-risk
|
||||||
{'code': 'KP', 'name': 'North Korea'},
|
{'code': 'KP', 'name': 'North Korea'},
|
||||||
@@ -223,6 +226,60 @@ COMMON_COUNTRIES = [
|
|||||||
{'code': 'NZ', 'name': 'New Zealand'},
|
{'code': 'NZ', 'name': 'New Zealand'},
|
||||||
{'code': 'JP', 'name': 'Japan'},
|
{'code': 'JP', 'name': 'Japan'},
|
||||||
{'code': 'KR', 'name': 'South Korea'},
|
{'code': 'KR', 'name': 'South Korea'},
|
||||||
|
|
||||||
|
# Asia
|
||||||
|
{'code': 'HK', 'name': 'Hong Kong'},
|
||||||
|
{'code': 'TW', 'name': 'Taiwan'},
|
||||||
|
{'code': 'MN', 'name': 'Mongolia'},
|
||||||
|
|
||||||
|
# Baltic states
|
||||||
|
{'code': 'LT', 'name': 'Lithuania'},
|
||||||
|
{'code': 'LV', 'name': 'Latvia'},
|
||||||
|
{'code': 'EE', 'name': 'Estonia'},
|
||||||
|
|
||||||
|
|
||||||
|
# Europe
|
||||||
|
{'code': 'CZ', 'name': 'Czechia'},
|
||||||
|
{'code': 'SK', 'name': 'Slovakia'},
|
||||||
|
{'code': 'HU', 'name': 'Hungary'},
|
||||||
|
{'code': 'GR', 'name': 'Greece'},
|
||||||
|
{'code': 'IS', 'name': 'Iceland'},
|
||||||
|
{'code': 'CY', 'name': 'Cyprus'},
|
||||||
|
{'code': 'MT', 'name': 'Malta'},
|
||||||
|
{'code': 'LU', 'name': 'Luxembourg'},
|
||||||
|
{'code': 'IE', 'name': 'Ireland'},
|
||||||
|
{'code': 'PT', 'name': 'Portugal'},
|
||||||
|
{'code': 'SI', 'name': 'Slovenia'},
|
||||||
|
{'code': 'HR', 'name': 'Croatia'},
|
||||||
|
|
||||||
|
# Middle East
|
||||||
|
{'code': 'IL', 'name': 'Israel'},
|
||||||
|
{'code': 'PS', 'name': 'Palestine'},
|
||||||
|
|
||||||
|
# Latin America
|
||||||
|
{'code': 'CR', 'name': 'Costa Rica'},
|
||||||
|
{'code': 'PA', 'name': 'Panama'},
|
||||||
|
|
||||||
|
{'code': 'BM', 'name': 'Bermuda'},
|
||||||
|
{'code': 'LC', 'name': 'Saint Lucia'},
|
||||||
|
{'code': 'MU', 'name': 'Mauritius'},
|
||||||
|
{'code': 'GD', 'name': 'Grenada'},
|
||||||
|
{'code': 'BN', 'name': 'Brunei Darussalam'},
|
||||||
|
{'code': 'RW', 'name': 'Rwanda'},
|
||||||
|
{'code': 'MG', 'name': 'Madagascar'},
|
||||||
|
{'code': 'BZ', 'name': 'Belize'},
|
||||||
|
{'code': 'MR', 'name': 'Mauritania'},
|
||||||
|
{'code': 'BS', 'name': 'Bahamas'},
|
||||||
|
{'code': 'SR', 'name': 'Suriname'},
|
||||||
|
{'code': 'GY', 'name': 'Guyana'},
|
||||||
|
{'code': 'PR', 'name': 'Puerto Rico'},
|
||||||
|
{'code': 'GT', 'name': 'Guatemala'},
|
||||||
|
{'code': 'SV', 'name': 'El Salvador'},
|
||||||
|
{'code': 'BB', 'name': 'Barbados'},
|
||||||
|
{'code': 'HN', 'name': 'Honduras'},
|
||||||
|
{'code': 'SC', 'name': 'Seychelles'},
|
||||||
|
|
||||||
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# Sort countries by name
|
# Sort countries by name
|
||||||
@@ -234,10 +291,17 @@ for country in COMMON_COUNTRIES:
|
|||||||
|
|
||||||
PRECACHE_APP_TYPES = [
|
PRECACHE_APP_TYPES = [
|
||||||
'nginx_geo',
|
'nginx_geo',
|
||||||
|
'nginx_map',
|
||||||
'nginx_deny',
|
'nginx_deny',
|
||||||
'apache_24',
|
'apache_24',
|
||||||
|
'apache_22',
|
||||||
'haproxy_acl',
|
'haproxy_acl',
|
||||||
|
'haproxy_lua',
|
||||||
|
'haproxy_map',
|
||||||
'raw-cidr_txt',
|
'raw-cidr_txt',
|
||||||
|
'raw-newline_txt',
|
||||||
|
'raw-json',
|
||||||
|
'raw-csv',
|
||||||
]
|
]
|
||||||
|
|
||||||
PRECACHE_AGGREGATE_VARIANTS = [True]
|
PRECACHE_AGGREGATE_VARIANTS = [True]
|
||||||
@@ -257,3 +321,11 @@ CACHE_MAX_AGE_HOURS = 168
|
|||||||
PRECACHE_INTERVAL_HOURS = int(os.getenv('PRECACHE_INTERVAL_HOURS', 168))
|
PRECACHE_INTERVAL_HOURS = int(os.getenv('PRECACHE_INTERVAL_HOURS', 168))
|
||||||
PRECACHE_CHECK_INTERVAL = int(os.getenv('PRECACHE_CHECK_INTERVAL', 3600))
|
PRECACHE_CHECK_INTERVAL = int(os.getenv('PRECACHE_CHECK_INTERVAL', 3600))
|
||||||
PRECACHE_MIN_TTL_HOURS = int(os.getenv('PRECACHE_MIN_TTL_HOURS', 7))
|
PRECACHE_MIN_TTL_HOURS = int(os.getenv('PRECACHE_MIN_TTL_HOURS', 7))
|
||||||
|
|
||||||
|
# MaxMind scan chunking
|
||||||
|
MAXMIND_CHUNK_TASKS_PER_WORKER = int(os.getenv('MAXMIND_CHUNK_TASKS_PER_WORKER', '16'))
|
||||||
|
MAXMIND_CHUNK_MIN = int(os.getenv('MAXMIND_CHUNK_MIN', '200'))
|
||||||
|
MAXMIND_CHUNK_MAX = int(os.getenv('MAXMIND_CHUNK_MAX', '4000'))
|
||||||
|
|
||||||
|
MAXMIND_WORKERS_MIN = int(os.getenv('MAXMIND_WORKERS_MIN', '6'))
|
||||||
|
MAXMIND_WORKERS_MAX = int(os.getenv('MAXMIND_WORKERS_MAX', '48'))
|
||||||
|
|||||||
145
geoip_handler.py
145
geoip_handler.py
@@ -13,7 +13,8 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
|
|||||||
import threading
|
import threading
|
||||||
import config
|
import config
|
||||||
import ipaddress
|
import ipaddress
|
||||||
|
import math
|
||||||
|
from multiprocessing import cpu_count
|
||||||
|
|
||||||
def generate_metadata(countries: list, country_data: dict, redis_stats: dict = None, handler: 'GeoIPHandler' = None) -> dict:
|
def generate_metadata(countries: list, country_data: dict, redis_stats: dict = None, handler: 'GeoIPHandler' = None) -> dict:
|
||||||
"""
|
"""
|
||||||
@@ -568,70 +569,100 @@ class GeoIPHandler:
|
|||||||
|
|
||||||
return scan_ranges
|
return scan_ranges
|
||||||
|
|
||||||
def _scan_maxmind_for_country(self, country_code: str, progress_callback=None) -> list:
|
def _scan_maxmind_for_country(self, country_code: str, progress_callback=None, workers=None) -> list:
|
||||||
if not self.mmdb_file.exists():
|
if not self.mmdb_file.exists():
|
||||||
return []
|
return []
|
||||||
|
|
||||||
country_code = country_code.upper()
|
country_code = country_code.upper()
|
||||||
found_networks = set()
|
|
||||||
found_networks_lock = threading.Lock()
|
|
||||||
|
|
||||||
try:
|
|
||||||
if progress_callback:
|
|
||||||
progress_callback(f"Starting parallel MaxMind scan with 32 workers...")
|
|
||||||
|
|
||||||
scan_ranges = self._get_scan_ranges()
|
scan_ranges = self._get_scan_ranges()
|
||||||
total_ranges = len(scan_ranges)
|
total_ranges = len(scan_ranges)
|
||||||
|
|
||||||
|
# workers default
|
||||||
|
if workers is None or int(workers) <= 0:
|
||||||
|
workers = min(32, max(4, cpu_count() * 2))
|
||||||
|
else:
|
||||||
|
workers = int(workers)
|
||||||
|
|
||||||
|
tasks_per_worker = getattr(config, "MAXMIND_CHUNK_TASKS_PER_WORKER", 12)
|
||||||
|
chunk_min = getattr(config, "MAXMIND_CHUNK_MIN", 50)
|
||||||
|
chunk_max = getattr(config, "MAXMIND_CHUNK_MAX", 2000)
|
||||||
|
|
||||||
|
target_tasks = max(workers * int(tasks_per_worker), workers)
|
||||||
|
chunk = int(math.ceil(total_ranges / float(target_tasks)))
|
||||||
|
CHUNK = max(int(chunk_min), min(int(chunk_max), chunk))
|
||||||
|
|
||||||
if progress_callback:
|
if progress_callback:
|
||||||
|
progress_callback(f"Starting parallel MaxMind scan with {workers} workers...")
|
||||||
progress_callback(f"Scanning {total_ranges} IP ranges...")
|
progress_callback(f"Scanning {total_ranges} IP ranges...")
|
||||||
|
progress_callback(f"Chunking: {CHUNK} ranges/task (~{int(math.ceil(total_ranges/float(CHUNK)))} tasks)")
|
||||||
|
|
||||||
|
found_networks = set()
|
||||||
|
found_networks_lock = threading.Lock()
|
||||||
|
|
||||||
completed = 0
|
completed = 0
|
||||||
completed_lock = threading.Lock()
|
completed_lock = threading.Lock()
|
||||||
|
|
||||||
def scan_range(network_str):
|
tls = threading.local()
|
||||||
nonlocal completed
|
|
||||||
|
|
||||||
reader = geoip2.database.Reader(str(self.mmdb_file))
|
def get_reader():
|
||||||
local_networks = set()
|
r = getattr(tls, "reader", None)
|
||||||
|
if r is None:
|
||||||
|
tls.reader = geoip2.database.Reader(str(self.mmdb_file))
|
||||||
|
return tls.reader
|
||||||
|
|
||||||
|
def scan_one_range(reader, network_str: str):
|
||||||
|
local = set()
|
||||||
try:
|
try:
|
||||||
network = ipaddress.IPv4Network(network_str, strict=False)
|
network = ipaddress.IPv4Network(network_str, strict=False)
|
||||||
|
|
||||||
for subnet in network.subnets(new_prefix=24):
|
for subnet in network.subnets(new_prefix=24):
|
||||||
sample_ip = str(subnet.network_address + 1)
|
sample_ip = str(subnet.network_address + 1)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
response = reader.country(sample_ip)
|
resp = reader.country(sample_ip)
|
||||||
if response.country.iso_code == country_code:
|
if resp.country.iso_code == country_code:
|
||||||
local_networks.add(str(subnet))
|
local.add(subnet) # mniej alokacji niż str() w pętli
|
||||||
except:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return local
|
||||||
|
|
||||||
except Exception as e:
|
def scan_chunk(ranges):
|
||||||
pass
|
nonlocal completed
|
||||||
finally:
|
reader = get_reader()
|
||||||
reader.close()
|
local_chunk = set()
|
||||||
|
|
||||||
|
for r in ranges:
|
||||||
|
local_chunk.update(scan_one_range(reader, r))
|
||||||
|
|
||||||
with completed_lock:
|
with completed_lock:
|
||||||
completed += 1
|
completed += 1
|
||||||
if completed % 2000 == 0 and progress_callback:
|
c = completed
|
||||||
|
|
||||||
|
# progres częściej (diagnostyka), nie wpływa na wynik
|
||||||
|
if progress_callback and (c % 500 == 0 or c == total_ranges):
|
||||||
with found_networks_lock:
|
with found_networks_lock:
|
||||||
progress_pct = (completed / total_ranges) * 100
|
found_cnt = len(found_networks)
|
||||||
progress_callback(f"Scanning: {completed}/{total_ranges} ranges ({progress_pct:.1f}%), found {len(found_networks)} networks")
|
pct = (c / float(total_ranges)) * 100.0
|
||||||
|
progress_callback(
|
||||||
|
f"Scanning: {c}/{total_ranges} ranges ({pct:.1f}%), found {found_cnt} networks"
|
||||||
|
)
|
||||||
|
|
||||||
return local_networks
|
return local_chunk
|
||||||
|
|
||||||
with ThreadPoolExecutor(max_workers=32) as executor:
|
try:
|
||||||
futures = {executor.submit(scan_range, r): r for r in scan_ranges}
|
chunks = [scan_ranges[i:i + CHUNK] for i in range(0, total_ranges, CHUNK)]
|
||||||
|
|
||||||
|
with ThreadPoolExecutor(max_workers=workers) as executor:
|
||||||
|
futures = [executor.submit(scan_chunk, ch) for ch in chunks]
|
||||||
for future in as_completed(futures):
|
for future in as_completed(futures):
|
||||||
local_nets = future.result()
|
local_nets = future.result()
|
||||||
|
if local_nets:
|
||||||
with found_networks_lock:
|
with found_networks_lock:
|
||||||
found_networks.update(local_nets)
|
found_networks.update(local_nets)
|
||||||
|
|
||||||
result = list(found_networks)
|
# konwersja na string na końcu (wynik ten sam co wcześniej)
|
||||||
|
result = [str(n) for n in found_networks]
|
||||||
|
|
||||||
if progress_callback:
|
if progress_callback:
|
||||||
progress_callback(f"MaxMind scan complete: {len(result)} networks")
|
progress_callback(f"MaxMind scan complete: {len(result)} networks")
|
||||||
@@ -1147,21 +1178,26 @@ class ConfigGenerator:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def generate_haproxy_map(country_networks: dict, aggregate: bool = True, redis_ips: set = None) -> str:
|
def generate_haproxy_map(country_networks: dict, aggregate: bool = True, redis_ips: set = None) -> str:
|
||||||
"""
|
"""
|
||||||
Generate HAProxy MAP file (IP COUNTRY format)
|
Generate HAProxy MAP file (CIDR COUNTRY format)
|
||||||
"""
|
"""
|
||||||
|
# Get metadata
|
||||||
countries = sorted(country_networks.keys())
|
countries = sorted(country_networks.keys())
|
||||||
|
redis_stats = None
|
||||||
redisstats = None
|
|
||||||
if redis_ips:
|
if redis_ips:
|
||||||
redisstats = {"total": len(redis_ips), "unique": len(redis_ips), "deduped": 0}
|
redis_stats = {
|
||||||
|
'total': len(redis_ips),
|
||||||
|
'unique': len(redis_ips),
|
||||||
|
'deduped': 0
|
||||||
|
}
|
||||||
|
|
||||||
handler = GeoIPHandler()
|
handler = GeoIPHandler()
|
||||||
metadata = generate_metadata(countries, country_networks, redisstats, handler)
|
metadata = generate_metadata(countries, country_networks, redis_stats, handler)
|
||||||
|
|
||||||
# Aggregate networks
|
# Aggregate networks (for header stats only, same style as ACL)
|
||||||
all_networks = []
|
all_networks = []
|
||||||
for networks in country_networks.values():
|
for nets in country_networks.values():
|
||||||
all_networks.extend(networks)
|
all_networks.extend(nets)
|
||||||
|
|
||||||
if redis_ips:
|
if redis_ips:
|
||||||
all_networks.extend(redis_ips)
|
all_networks.extend(redis_ips)
|
||||||
|
|
||||||
@@ -1170,7 +1206,7 @@ class ConfigGenerator:
|
|||||||
else:
|
else:
|
||||||
all_networks = sorted(list(set(all_networks)))
|
all_networks = sorted(list(set(all_networks)))
|
||||||
|
|
||||||
# Generate header
|
# Generate header (same style as ACL)
|
||||||
config = "# " + "="*77 + "\n"
|
config = "# " + "="*77 + "\n"
|
||||||
config += "# HAProxy MAP Configuration\n"
|
config += "# HAProxy MAP Configuration\n"
|
||||||
config += f"# Generated: {metadata['timestamp']}\n"
|
config += f"# Generated: {metadata['timestamp']}\n"
|
||||||
@@ -1192,20 +1228,37 @@ class ConfigGenerator:
|
|||||||
config += f"# Database: {metadata['cache_db_path']}\n"
|
config += f"# Database: {metadata['cache_db_path']}\n"
|
||||||
config += "# \n"
|
config += "# \n"
|
||||||
config += "# Usage in HAProxy:\n"
|
config += "# Usage in HAProxy:\n"
|
||||||
config += "# acl banned_ips src -f /path/to/this_file.acl\n"
|
config += "# map_beg(/path/to/geo.map) -m ip $src var(txn.country)\n"
|
||||||
config += "# http-request deny if banned_ips\n"
|
|
||||||
config += "# \n"
|
config += "# \n"
|
||||||
config += "# " + "="*77 + "\n"
|
config += "# " + "="*77 + "\n"
|
||||||
config += "\n"
|
config += "\n"
|
||||||
|
|
||||||
# MAP BODY
|
# MAP BODY (per-country aggregation => poprawny country, brak XX, brak pustych wyników)
|
||||||
for network in all_networks:
|
for country_code, nets in sorted(country_networks.items()):
|
||||||
country = next((c for c, nets in country_networks.items() if network in nets), 'XX')
|
if not nets:
|
||||||
config += f"{network} {country}\n"
|
continue
|
||||||
|
|
||||||
|
if aggregate:
|
||||||
|
nets = ConfigGenerator._aggregate_networks(nets)
|
||||||
|
else:
|
||||||
|
nets = sorted(list(set(nets)))
|
||||||
|
|
||||||
|
for network in nets:
|
||||||
|
config += f"{network} {country_code}\n"
|
||||||
|
|
||||||
|
# Redis IPs (opcjonalnie jako osobna etykieta)
|
||||||
|
if redis_ips:
|
||||||
|
redis_list = list(redis_ips)
|
||||||
|
if aggregate:
|
||||||
|
redis_list = ConfigGenerator._aggregate_networks(redis_list)
|
||||||
|
else:
|
||||||
|
redis_list = sorted(list(set(redis_list)))
|
||||||
|
|
||||||
|
for network in redis_list:
|
||||||
|
config += f"{network} REDIS\n"
|
||||||
|
|
||||||
return config
|
return config
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def generate_haproxy_lua(country_networks: dict, aggregate: bool = True, redis_ips: set = None) -> str:
|
def generate_haproxy_lua(country_networks: dict, aggregate: bool = True, redis_ips: set = None) -> str:
|
||||||
"""Generate HAProxy Lua script with detailed metadata header"""
|
"""Generate HAProxy Lua script with detailed metadata header"""
|
||||||
|
|||||||
@@ -176,18 +176,23 @@ def process_country(country, networks_count, force=False):
|
|||||||
if app_type.startswith('raw-'):
|
if app_type.startswith('raw-'):
|
||||||
format_type = app_type.split('-')[1]
|
format_type = app_type.split('-')[1]
|
||||||
|
|
||||||
|
if aggregate:
|
||||||
|
nets_out = ConfigGenerator._aggregate_networks(networks)
|
||||||
|
else:
|
||||||
|
nets_out = sorted(set(networks))
|
||||||
|
|
||||||
if format_type == 'cidr_txt':
|
if format_type == 'cidr_txt':
|
||||||
config_text = '\n'.join(networks)
|
config_text = '\n'.join(nets_out)
|
||||||
elif format_type == 'newline_txt':
|
elif format_type == 'newline_txt':
|
||||||
config_text = '\n'.join(networks)
|
config_text = '\n'.join(nets_out)
|
||||||
elif format_type == 'json':
|
elif format_type == 'json':
|
||||||
config_text = json.dumps({
|
config_text = json.dumps({
|
||||||
'country': country,
|
'country': country,
|
||||||
'networks': networks,
|
'networks': nets_out,
|
||||||
'count': len(networks)
|
'count': len(nets_out)
|
||||||
}, indent=2)
|
}, indent=2)
|
||||||
elif format_type == 'csv':
|
elif format_type == 'csv':
|
||||||
config_text = 'network\n' + '\n'.join(networks)
|
config_text = 'network\n' + '\n'.join(nets_out)
|
||||||
else:
|
else:
|
||||||
errors += 1
|
errors += 1
|
||||||
continue
|
continue
|
||||||
|
|||||||
34
scheduler.py
34
scheduler.py
@@ -14,7 +14,7 @@ from pathlib import Path
|
|||||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
from multiprocessing import cpu_count
|
from multiprocessing import cpu_count
|
||||||
import threading
|
import threading
|
||||||
|
import traceback
|
||||||
|
|
||||||
sys.path.insert(0, str(Path(__file__).parent))
|
sys.path.insert(0, str(Path(__file__).parent))
|
||||||
|
|
||||||
@@ -29,6 +29,21 @@ write_lock = threading.Lock()
|
|||||||
active_scans = {}
|
active_scans = {}
|
||||||
active_scans_lock = threading.Lock()
|
active_scans_lock = threading.Lock()
|
||||||
|
|
||||||
|
def heartbeat():
|
||||||
|
log_safe(f"[{datetime.now()}] HEARTBEAT running=True next_run={schedule.next_run()} jobs={len(schedule.jobs)}")
|
||||||
|
|
||||||
|
def compute_maxmind_workers():
|
||||||
|
with active_scans_lock:
|
||||||
|
active = max(1, len(active_scans))
|
||||||
|
|
||||||
|
cpu = cpu_count()
|
||||||
|
total_budget = max(32, cpu * 6) # 16*6 = 96
|
||||||
|
per_country = max(4, total_budget // active)
|
||||||
|
|
||||||
|
min_w = int(os.getenv('MAXMIND_WORKERS_MIN', '6'))
|
||||||
|
max_w = int(os.getenv('MAXMIND_WORKERS_MAX', '48'))
|
||||||
|
|
||||||
|
return max(min_w, min(max_w, per_country))
|
||||||
|
|
||||||
def signal_handler(signum, frame):
|
def signal_handler(signum, frame):
|
||||||
global running
|
global running
|
||||||
@@ -96,7 +111,14 @@ def scan_single_country(country_code, is_update=False):
|
|||||||
|
|
||||||
print(f"[{country_code}] Scanning MaxMind + GitHub...", flush=True)
|
print(f"[{country_code}] Scanning MaxMind + GitHub...", flush=True)
|
||||||
|
|
||||||
maxmind_networks = handler._scan_maxmind_for_country(country_code, progress_callback=progress_cb)
|
maxmind_workers = compute_maxmind_workers()
|
||||||
|
print(f"[{country_code}] MaxMind workers: {maxmind_workers} (active scans: {len(active_scans)})", flush=True)
|
||||||
|
|
||||||
|
maxmind_networks = handler._scan_maxmind_for_country(
|
||||||
|
country_code,
|
||||||
|
progress_callback=progress_cb,
|
||||||
|
workers=maxmind_workers
|
||||||
|
)
|
||||||
|
|
||||||
if maxmind_networks:
|
if maxmind_networks:
|
||||||
print(f"[{country_code}] MaxMind: {len(maxmind_networks):,} networks, checking GitHub...", flush=True)
|
print(f"[{country_code}] MaxMind: {len(maxmind_networks):,} networks, checking GitHub...", flush=True)
|
||||||
@@ -386,8 +408,16 @@ if __name__ == '__main__':
|
|||||||
print("\nScheduler running. Press Ctrl+C to stop.\n", flush=True)
|
print("\nScheduler running. Press Ctrl+C to stop.\n", flush=True)
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
|
# heartbeat
|
||||||
|
schedule.every(15).minutes.do(heartbeat)
|
||||||
|
|
||||||
while running:
|
while running:
|
||||||
|
try:
|
||||||
schedule.run_pending()
|
schedule.run_pending()
|
||||||
|
except Exception as e:
|
||||||
|
log_safe(f"[{datetime.now()}] ERROR in run_pending: {e}")
|
||||||
|
traceback.print_exc()
|
||||||
|
sys.stdout.flush()
|
||||||
time.sleep(60)
|
time.sleep(60)
|
||||||
|
|
||||||
print("\n[SHUTDOWN] Stopped gracefully.", flush=True)
|
print("\n[SHUTDOWN] Stopped gracefully.", flush=True)
|
||||||
|
|||||||
57
script.py
Normal file
57
script.py
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import requests
|
||||||
|
|
||||||
|
BASE_URL = "http://geo-block.krk.itg.demo-ht.iadm"
|
||||||
|
ENDPOINT = "/api/generate/raw"
|
||||||
|
|
||||||
|
payload_json = """{
|
||||||
|
"countries": [
|
||||||
|
"PL"
|
||||||
|
],
|
||||||
|
"aggregate": true,
|
||||||
|
"use_cache": true,
|
||||||
|
"app_type": "raw-cidr_json",
|
||||||
|
"as_js": false
|
||||||
|
}"""
|
||||||
|
payload = json.loads(payload_json)
|
||||||
|
|
||||||
|
resp = requests.post(BASE_URL + ENDPOINT, json=payload, timeout=120)
|
||||||
|
|
||||||
|
print("Status:", resp.status_code)
|
||||||
|
print("X-From-Cache:", resp.headers.get("X-From-Cache"))
|
||||||
|
print("X-Cache-Type:", resp.headers.get("X-Cache-Type"))
|
||||||
|
print("X-Generated-At:", resp.headers.get("X-Generated-At"))
|
||||||
|
|
||||||
|
ct = (resp.headers.get("Content-Type") or "").lower()
|
||||||
|
|
||||||
|
if resp.status_code >= 400:
|
||||||
|
try:
|
||||||
|
print(json.dumps(resp.json(), indent=2))
|
||||||
|
except Exception:
|
||||||
|
print(resp.text)
|
||||||
|
raise SystemExit(1)
|
||||||
|
|
||||||
|
if "application/json" in ct:
|
||||||
|
print(json.dumps(resp.json(), indent=2))
|
||||||
|
else:
|
||||||
|
filename = "output"
|
||||||
|
cd = resp.headers.get("Content-Disposition") or ""
|
||||||
|
m = re.search(r'filename="?([^"]+)"?', cd)
|
||||||
|
if m:
|
||||||
|
filename = m.group(1)
|
||||||
|
else:
|
||||||
|
if "text/csv" in ct:
|
||||||
|
filename += ".csv"
|
||||||
|
elif "javascript" in ct:
|
||||||
|
filename += ".js"
|
||||||
|
elif "text/plain" in ct:
|
||||||
|
filename += ".txt"
|
||||||
|
else:
|
||||||
|
filename += ".bin"
|
||||||
|
|
||||||
|
with open(filename, "wb") as f:
|
||||||
|
f.write(resp.content)
|
||||||
|
|
||||||
|
print("Saved to:", filename)
|
||||||
@@ -1,13 +1,13 @@
|
|||||||
|
html {
|
||||||
|
scroll-behavior: smooth;
|
||||||
|
}
|
||||||
|
|
||||||
body {
|
body {
|
||||||
background-color: #f5f5f5;
|
background-color: #f5f5f5;
|
||||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
|
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
|
||||||
padding-bottom: 2rem;
|
padding-bottom: 2rem;
|
||||||
}
|
}
|
||||||
|
|
||||||
html {
|
|
||||||
scroll-behavior: smooth;
|
|
||||||
}
|
|
||||||
|
|
||||||
.card {
|
.card {
|
||||||
border: 1px solid #e0e0e0;
|
border: 1px solid #e0e0e0;
|
||||||
border-radius: 0.5rem;
|
border-radius: 0.5rem;
|
||||||
@@ -407,6 +407,115 @@ html {
|
|||||||
color: #e06c75 !important;
|
color: #e06c75 !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.api-card pre,
|
||||||
|
.api-endpoint pre {
|
||||||
|
background-color: #282c34;
|
||||||
|
color: #abb2bf;
|
||||||
|
border-radius: 0.5rem;
|
||||||
|
border: 1px solid rgba(224, 224, 224, 0.25);
|
||||||
|
padding: 1rem 1.25rem;
|
||||||
|
overflow-x: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-card pre code,
|
||||||
|
.api-endpoint pre code {
|
||||||
|
background: transparent;
|
||||||
|
color: inherit;
|
||||||
|
padding: 0;
|
||||||
|
font-size: 0.9rem;
|
||||||
|
line-height: 1.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-card .api-kv code,
|
||||||
|
.api-card p code,
|
||||||
|
.api-card td code,
|
||||||
|
.api-card li code,
|
||||||
|
.api-card .alert code {
|
||||||
|
background-color: rgba(13, 110, 253, 0.08);
|
||||||
|
border: 1px solid rgba(13, 110, 253, 0.12);
|
||||||
|
color: #0b5ed7;
|
||||||
|
padding: 0.1rem 0.3rem;
|
||||||
|
border-radius: 0.35rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-trybox {
|
||||||
|
border: 1px dashed rgba(0, 0, 0, 0.18);
|
||||||
|
border-radius: 0.5rem;
|
||||||
|
padding: 1rem;
|
||||||
|
background-color: #fafafa;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-trybox pre {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-card textarea.form-control {
|
||||||
|
font-family: 'Courier New', Consolas, Monaco, monospace;
|
||||||
|
font-size: 0.9rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.api-download-link {
|
||||||
|
display: inline-block;
|
||||||
|
margin-top: 0.5rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-dot {
|
||||||
|
width: 10px;
|
||||||
|
height: 10px;
|
||||||
|
border-radius: 50%;
|
||||||
|
display: inline-block;
|
||||||
|
transition: background-color 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-dot.bg-success {
|
||||||
|
background-color: #198754 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-dot.bg-danger {
|
||||||
|
background-color: #dc3545 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-dot.bg-secondary {
|
||||||
|
background-color: #6c757d !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
#variantDescription {
|
||||||
|
animation: fadeIn 0.3s ease-in;
|
||||||
|
}
|
||||||
|
|
||||||
|
.code-wrap {
|
||||||
|
border-radius: .75rem;
|
||||||
|
border: 1px solid rgba(0,0,0,.1);
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.code-wrap-header {
|
||||||
|
background: #111827;
|
||||||
|
color: #e5e7eb;
|
||||||
|
padding: .5rem .75rem;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
font-size: .9rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.code-wrap-body {
|
||||||
|
margin: 0;
|
||||||
|
background: #282c34;
|
||||||
|
color: #abb2bf;
|
||||||
|
padding: 1rem 1.25rem;
|
||||||
|
overflow-x: auto;
|
||||||
|
font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
|
||||||
|
font-size: .9rem;
|
||||||
|
line-height: 1.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
.code-wrap-body code {
|
||||||
|
background: transparent;
|
||||||
|
color: inherit;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
@keyframes fadeIn {
|
@keyframes fadeIn {
|
||||||
from {
|
from {
|
||||||
opacity: 0;
|
opacity: 0;
|
||||||
@@ -486,11 +595,3 @@ html {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#variantDescription {
|
|
||||||
animation: fadeIn 0.3s ease-in;
|
|
||||||
}
|
|
||||||
|
|
||||||
@keyframes fadeIn {
|
|
||||||
from { opacity: 0; transform: translateY(-5px); }
|
|
||||||
to { opacity: 1; transform: translateY(0); }
|
|
||||||
}
|
|
||||||
355
static/js/api.js
355
static/js/api.js
@@ -1,75 +1,342 @@
|
|||||||
const baseUrl = window.location.origin;
|
const baseUrl = window.location.origin;
|
||||||
|
|
||||||
document.addEventListener('DOMContentLoaded', function() {
|
document.addEventListener('DOMContentLoaded', () => {
|
||||||
document.getElementById('baseUrl').textContent = baseUrl;
|
const baseEl = document.getElementById('baseUrl');
|
||||||
document.querySelectorAll('[id^="curlUrl"]').forEach(element => {
|
if (baseEl) baseEl.textContent = baseUrl;
|
||||||
element.textContent = baseUrl;
|
|
||||||
|
document.querySelectorAll('[id^="curlUrl"]').forEach((el) => {
|
||||||
|
el.textContent = baseUrl;
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
function toggleEndpoint(id) {
|
function toggleEndpoint(id) {
|
||||||
const element = document.getElementById(id);
|
const el = document.getElementById(id);
|
||||||
const bsCollapse = new bootstrap.Collapse(element, {
|
if (!el) return;
|
||||||
toggle: true
|
new bootstrap.Collapse(el, { toggle: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
function showResponse(id) {
|
||||||
|
const div = document.getElementById(id);
|
||||||
|
const body = document.getElementById(id + '-body');
|
||||||
|
if (div) div.style.display = 'block';
|
||||||
|
if (body) body.textContent = 'Loading...';
|
||||||
|
return { div, body };
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatHeaders(headers) {
|
||||||
|
const entries = [];
|
||||||
|
for (const [k, v] of headers.entries()) entries.push([k, v]);
|
||||||
|
entries.sort((a, b) => a[0].localeCompare(b[0]));
|
||||||
|
return entries.map(([k, v]) => `${k}: ${v}`).join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
function cacheHeaderSummary(headers) {
|
||||||
|
const keys = ['x-from-cache', 'x-cache-type', 'x-generated-at', 'content-type', 'content-disposition'];
|
||||||
|
const out = [];
|
||||||
|
for (const k of keys) {
|
||||||
|
const v = headers.get(k);
|
||||||
|
if (v !== null) out.push(`${k}: ${v}`);
|
||||||
|
}
|
||||||
|
return out.join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
async function readBodyAuto(response) {
|
||||||
|
const ct = (response.headers.get('content-type') || '').toLowerCase();
|
||||||
|
const raw = await response.text();
|
||||||
|
const isJson = ct.includes('application/json');
|
||||||
|
if (isJson) {
|
||||||
|
try {
|
||||||
|
return { kind: 'json', data: JSON.parse(raw), contentType: ct, raw };
|
||||||
|
} catch {
|
||||||
|
return { kind: 'text', data: raw, contentType: ct, raw };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return { kind: 'text', data: raw, contentType: ct, raw };
|
||||||
|
}
|
||||||
|
|
||||||
|
function safeParseJsonFromTextarea(textareaId) {
|
||||||
|
const el = document.getElementById(textareaId);
|
||||||
|
if (!el) throw new Error(`Missing textarea: ${textareaId}`);
|
||||||
|
const raw = el.value;
|
||||||
|
try {
|
||||||
|
return JSON.parse(raw);
|
||||||
|
} catch (e) {
|
||||||
|
throw new Error(`Invalid JSON in textarea "${textareaId}": ${e.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function guessExtensionFromContentType(ct) {
|
||||||
|
const s = (ct || '').toLowerCase();
|
||||||
|
if (s.includes('application/json')) return 'json';
|
||||||
|
if (s.includes('text/csv')) return 'csv';
|
||||||
|
if (s.includes('application/javascript')) return 'js';
|
||||||
|
if (s.includes('text/plain')) return 'txt';
|
||||||
|
return 'txt';
|
||||||
|
}
|
||||||
|
|
||||||
|
function firstLines(text, maxLines = 80) {
|
||||||
|
const lines = String(text || '').split(/\r?\n/);
|
||||||
|
return lines.slice(0, maxLines).join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
function tryApi(endpoint, method = 'GET', body = null) {
|
||||||
|
const responseId = 'response-' + endpoint.replace(/\//g, '-');
|
||||||
|
const { body: out } = showResponse(responseId);
|
||||||
|
|
||||||
|
const url = baseUrl + '/api/' + endpoint;
|
||||||
|
const opts = { method, headers: {} };
|
||||||
|
|
||||||
|
if (method !== 'GET' && method !== 'HEAD') {
|
||||||
|
opts.headers['Content-Type'] = 'application/json';
|
||||||
|
opts.body = body == null ? '{}' : (typeof body === 'string' ? body : JSON.stringify(body));
|
||||||
|
}
|
||||||
|
|
||||||
|
fetch(url, opts)
|
||||||
|
.then(async (resp) => {
|
||||||
|
const parsed = await readBodyAuto(resp);
|
||||||
|
|
||||||
|
const meta = [
|
||||||
|
`HTTP ${resp.status} ${resp.statusText}`,
|
||||||
|
cacheHeaderSummary(resp.headers),
|
||||||
|
'\n--- Headers ---\n' + formatHeaders(resp.headers),
|
||||||
|
'\n--- Body ---\n'
|
||||||
|
].filter(Boolean).join('\n');
|
||||||
|
|
||||||
|
if (!resp.ok) {
|
||||||
|
const msg = parsed.kind === 'json'
|
||||||
|
? (parsed.data?.error || JSON.stringify(parsed.data, null, 2))
|
||||||
|
: String(parsed.data || '');
|
||||||
|
throw new Error(`${meta}${msg}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const pretty = parsed.kind === 'json'
|
||||||
|
? JSON.stringify(parsed.data, null, 2)
|
||||||
|
: String(parsed.data ?? '');
|
||||||
|
|
||||||
|
out.textContent = meta + pretty;
|
||||||
|
})
|
||||||
|
.catch((err) => {
|
||||||
|
out.textContent = 'Error: ' + err.message;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function tryEndpoint(endpoint, method = 'GET') {
|
function tryPath(path, method = 'GET') {
|
||||||
const url = baseUrl + '/api/' + endpoint;
|
const normalized = String(path || '').trim();
|
||||||
const responseId = 'response-' + endpoint.replace(/\//g, '-');
|
const key = normalized.replace(/[^\w-]/g, '-').replace(/-+/g, '-').replace(/^-|-$/g, '');
|
||||||
const responseDiv = document.getElementById(responseId);
|
const responseId = 'response-' + (key || 'path');
|
||||||
const responseBody = document.getElementById(responseId + '-body');
|
const { body: out } = showResponse(responseId);
|
||||||
|
|
||||||
responseDiv.style.display = 'block';
|
const url = baseUrl + normalized;
|
||||||
responseBody.textContent = 'Loading...';
|
fetch(url, { method })
|
||||||
|
.then(async (resp) => {
|
||||||
|
const parsed = await readBodyAuto(resp);
|
||||||
|
|
||||||
const options = {
|
const meta = [
|
||||||
method: method,
|
`HTTP ${resp.status} ${resp.statusText}`,
|
||||||
headers: {
|
cacheHeaderSummary(resp.headers),
|
||||||
'Content-Type': 'application/json'
|
'\n--- Headers ---\n' + formatHeaders(resp.headers),
|
||||||
|
'\n--- Body ---\n'
|
||||||
|
].filter(Boolean).join('\n');
|
||||||
|
|
||||||
|
if (!resp.ok) {
|
||||||
|
const msg = parsed.kind === 'json'
|
||||||
|
? (parsed.data?.error || JSON.stringify(parsed.data, null, 2))
|
||||||
|
: String(parsed.data || '');
|
||||||
|
throw new Error(`${meta}${msg}`);
|
||||||
}
|
}
|
||||||
};
|
|
||||||
|
|
||||||
fetch(url, options)
|
const pretty = parsed.kind === 'json'
|
||||||
.then(response => response.json())
|
? JSON.stringify(parsed.data, null, 2)
|
||||||
.then(data => {
|
: String(parsed.data ?? '');
|
||||||
responseBody.textContent = JSON.stringify(data, null, 2);
|
|
||||||
|
out.textContent = meta + pretty;
|
||||||
})
|
})
|
||||||
.catch(error => {
|
.catch((err) => {
|
||||||
responseBody.textContent = 'Error: ' + error.message;
|
out.textContent = 'Error: ' + err.message;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function tryApiJsonTextarea(endpoint, textareaId) {
|
||||||
|
try {
|
||||||
|
const body = safeParseJsonFromTextarea(textareaId);
|
||||||
|
tryApi(endpoint, 'POST', body);
|
||||||
|
} catch (e) {
|
||||||
|
const responseId = 'response-' + endpoint.replace(/\//g, '-');
|
||||||
|
const { body: out } = showResponse(responseId);
|
||||||
|
out.textContent = 'Error: ' + e.message;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
function tryInvalidateCountry() {
|
function tryInvalidateCountry() {
|
||||||
const countryInput = document.getElementById('invalidateCountry');
|
const countryInput = document.getElementById('invalidateCountry');
|
||||||
const country = countryInput.value.trim().toUpperCase();
|
const country = (countryInput?.value || '').trim().toUpperCase();
|
||||||
|
|
||||||
|
const { body: out } = showResponse('response-cache-invalidate');
|
||||||
|
|
||||||
if (!country || country.length !== 2) {
|
if (!country || country.length !== 2) {
|
||||||
alert('Please enter a valid 2-letter country code (e.g., CN, RU, US)');
|
out.textContent = 'Error: Please enter a valid 2-letter country code (e.g., CN, RU, US).';
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const url = baseUrl + '/api/cache/invalidate/' + country;
|
fetch(baseUrl + '/api/cache/invalidate/' + country, {
|
||||||
const responseDiv = document.getElementById('response-cache-invalidate');
|
|
||||||
const responseBody = document.getElementById('response-cache-invalidate-body');
|
|
||||||
|
|
||||||
responseDiv.style.display = 'block';
|
|
||||||
responseBody.textContent = 'Loading...';
|
|
||||||
|
|
||||||
fetch(url, {
|
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: { 'Content-Type': 'application/json' },
|
||||||
'Content-Type': 'application/json'
|
body: '{}'
|
||||||
}
|
|
||||||
})
|
})
|
||||||
.then(response => response.json())
|
.then(async (resp) => {
|
||||||
.then(data => {
|
const parsed = await readBodyAuto(resp);
|
||||||
responseBody.textContent = JSON.stringify(data, null, 2);
|
if (!resp.ok) {
|
||||||
if (data.success) {
|
const msg = parsed.kind === 'json'
|
||||||
|
? (parsed.data?.error || JSON.stringify(parsed.data, null, 2))
|
||||||
|
: String(parsed.data || '');
|
||||||
|
throw new Error(msg || `HTTP ${resp.status}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
out.textContent = (parsed.kind === 'json')
|
||||||
|
? JSON.stringify(parsed.data, null, 2)
|
||||||
|
: String(parsed.data ?? '');
|
||||||
|
|
||||||
|
if (parsed.kind === 'json' && parsed.data?.success && countryInput) {
|
||||||
countryInput.value = '';
|
countryInput.value = '';
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.catch(error => {
|
.catch((err) => {
|
||||||
responseBody.textContent = 'Error: ' + error.message;
|
out.textContent = 'Error: ' + err.message;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function fetchAsBlob(url, method, jsonBody) {
|
||||||
|
const resp = await fetch(url, {
|
||||||
|
method,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify(jsonBody ?? {})
|
||||||
|
});
|
||||||
|
|
||||||
|
const headersText = formatHeaders(resp.headers);
|
||||||
|
const cacheSummary = cacheHeaderSummary(resp.headers);
|
||||||
|
const ct = resp.headers.get('content-type') || '';
|
||||||
|
const cd = resp.headers.get('content-disposition') || '';
|
||||||
|
|
||||||
|
const blob = await resp.blob();
|
||||||
|
|
||||||
|
if (!resp.ok) {
|
||||||
|
let errText = '';
|
||||||
|
try { errText = await blob.text(); } catch { errText = ''; }
|
||||||
|
throw new Error(
|
||||||
|
`HTTP ${resp.status} ${resp.statusText}\n${cacheSummary}\n\n--- Headers ---\n${headersText}\n\n--- Body ---\n${errText}`.trim()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return { resp, blob, headersText, cacheSummary, contentType: ct, contentDisposition: cd };
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeDownloadLink(blob, filename) {
|
||||||
|
const url = URL.createObjectURL(blob);
|
||||||
|
const a = document.createElement('a');
|
||||||
|
a.href = url;
|
||||||
|
a.download = filename;
|
||||||
|
a.textContent = filename;
|
||||||
|
a.className = 'link-primary api-download-link';
|
||||||
|
a.onclick = () => setTimeout(() => URL.revokeObjectURL(url), 2500);
|
||||||
|
return a;
|
||||||
|
}
|
||||||
|
|
||||||
|
function downloadFromApiJsonTextarea(endpoint, textareaId, fileBaseName) {
|
||||||
|
const responseId = 'response-' + endpoint.replace(/\//g, '-');
|
||||||
|
const { body: out } = showResponse(responseId);
|
||||||
|
|
||||||
|
let body;
|
||||||
|
try {
|
||||||
|
body = safeParseJsonFromTextarea(textareaId);
|
||||||
|
} catch (e) {
|
||||||
|
out.textContent = 'Error: ' + e.message;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const url = baseUrl + '/api/' + endpoint;
|
||||||
|
|
||||||
|
fetchAsBlob(url, 'POST', body)
|
||||||
|
.then(async ({ blob, headersText, cacheSummary, contentType }) => {
|
||||||
|
const ext = guessExtensionFromContentType(contentType);
|
||||||
|
const filename = `${fileBaseName}.${ext}`;
|
||||||
|
|
||||||
|
let preview = '';
|
||||||
|
try {
|
||||||
|
const txt = await blob.text();
|
||||||
|
preview = firstLines(txt, 80);
|
||||||
|
} catch {
|
||||||
|
preview = '(binary content)';
|
||||||
|
}
|
||||||
|
|
||||||
|
out.textContent =
|
||||||
|
`OK\n${cacheSummary}\n\n--- Headers ---\n${headersText}\n\n--- Preview (first lines) ---\n${preview}\n\n--- Download ---\n`;
|
||||||
|
|
||||||
|
const link = makeDownloadLink(blob, filename);
|
||||||
|
out.parentElement.appendChild(link);
|
||||||
|
})
|
||||||
|
.catch((err) => {
|
||||||
|
out.textContent = 'Error: ' + err.message;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function previewTextFromGenerate(textareaId) {
|
||||||
|
const { body: out } = showResponse('response-generate-download');
|
||||||
|
|
||||||
|
let body;
|
||||||
|
try {
|
||||||
|
body = safeParseJsonFromTextarea(textareaId);
|
||||||
|
} catch (e) {
|
||||||
|
out.textContent = 'Error: ' + e.message;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
fetchAsBlob(baseUrl + '/api/generate', 'POST', body)
|
||||||
|
.then(async ({ blob, headersText, cacheSummary }) => {
|
||||||
|
let text = '';
|
||||||
|
try { text = await blob.text(); } catch { text = '(binary content)'; }
|
||||||
|
|
||||||
|
out.textContent =
|
||||||
|
`OK\n${cacheSummary}\n\n--- Headers ---\n${headersText}\n\n--- Preview (first ~80 lines) ---\n${firstLines(text, 80)}\n`;
|
||||||
|
})
|
||||||
|
.catch((err) => {
|
||||||
|
out.textContent = 'Error: ' + err.message;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function downloadFromGenerate(textareaId, fileBaseName) {
|
||||||
|
const { body: out } = showResponse('response-generate-download');
|
||||||
|
|
||||||
|
let body;
|
||||||
|
try {
|
||||||
|
body = safeParseJsonFromTextarea(textareaId);
|
||||||
|
} catch (e) {
|
||||||
|
out.textContent = 'Error: ' + e.message;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
fetchAsBlob(baseUrl + '/api/generate', 'POST', body)
|
||||||
|
.then(async ({ blob, headersText, cacheSummary, contentType, contentDisposition }) => {
|
||||||
|
const ext = guessExtensionFromContentType(contentType);
|
||||||
|
let filename = `${fileBaseName}.${ext}`;
|
||||||
|
|
||||||
|
const m = /filename="?([^"]+)"?/i.exec(contentDisposition || '');
|
||||||
|
if (m && m[1]) filename = m[1];
|
||||||
|
|
||||||
|
let preview = '';
|
||||||
|
try {
|
||||||
|
const txt = await blob.text();
|
||||||
|
preview = firstLines(txt, 80);
|
||||||
|
} catch {
|
||||||
|
preview = '(binary content)';
|
||||||
|
}
|
||||||
|
|
||||||
|
out.textContent =
|
||||||
|
`OK\n${cacheSummary}\n\n--- Headers ---\n${headersText}\n\n--- Preview (first lines) ---\n${preview}\n\n--- Download ---\n`;
|
||||||
|
|
||||||
|
const link = makeDownloadLink(blob, filename);
|
||||||
|
out.parentElement.appendChild(link);
|
||||||
|
})
|
||||||
|
.catch((err) => {
|
||||||
|
out.textContent = 'Error: ' + err.message;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
25
static/js/base.js
Normal file
25
static/js/base.js
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
document.addEventListener("DOMContentLoaded", function () {
|
||||||
|
|
||||||
|
const dot = document.getElementById("apiStatusDot");
|
||||||
|
const text = document.getElementById("apiStatusText");
|
||||||
|
|
||||||
|
if (!dot || !text) return;
|
||||||
|
|
||||||
|
fetch("/health", { method: "GET" })
|
||||||
|
.then(response => {
|
||||||
|
if (!response.ok) throw new Error("API not healthy");
|
||||||
|
dot.classList.remove("bg-secondary");
|
||||||
|
dot.classList.add("bg-success");
|
||||||
|
text.textContent = "API Online";
|
||||||
|
text.classList.remove("text-muted");
|
||||||
|
text.classList.add("text-success");
|
||||||
|
})
|
||||||
|
.catch(() => {
|
||||||
|
dot.classList.remove("bg-secondary");
|
||||||
|
dot.classList.add("bg-danger");
|
||||||
|
text.textContent = "API Offline";
|
||||||
|
text.classList.remove("text-muted");
|
||||||
|
text.classList.add("text-danger");
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
@@ -12,8 +12,8 @@ async function loadCacheStats() {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const [cacheResponse, sqliteResponse] = await Promise.all([
|
const [cacheResponse, sqliteResponse] = await Promise.all([
|
||||||
fetch('/api/cache/status'),
|
fetch('/api/cache/redis/status'),
|
||||||
fetch('/api/database/sqlite/status')
|
fetch('/api/cache/sqlite/status')
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const cacheData = await cacheResponse.json();
|
const cacheData = await cacheResponse.json();
|
||||||
|
|||||||
195
static/js/generator.js
Normal file
195
static/js/generator.js
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
const baseUrl = window.location.origin;
|
||||||
|
|
||||||
|
const variantsByApp = {
|
||||||
|
haproxy: [
|
||||||
|
{ value: "map", label: "map (recommended)" },
|
||||||
|
{ value: "acl", label: "acl" },
|
||||||
|
{ value: "lua", label: "lua" },
|
||||||
|
],
|
||||||
|
apache: [
|
||||||
|
{ value: "24", label: "24 (recommended)" },
|
||||||
|
{ value: "22", label: "22 (legacy)" },
|
||||||
|
],
|
||||||
|
nginx: [
|
||||||
|
{ value: "geo", label: "geo (recommended)" },
|
||||||
|
{ value: "deny", label: "deny (recommended)" },
|
||||||
|
{ value: "map", label: "map (not recommended)" },
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
function $(id) { return document.getElementById(id); }
|
||||||
|
|
||||||
|
function setBaseUrl() {
|
||||||
|
const el = $("baseUrl");
|
||||||
|
if (el) el.textContent = baseUrl;
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeCountries(input) {
|
||||||
|
return String(input || "")
|
||||||
|
.split(",")
|
||||||
|
.map(s => s.trim().toUpperCase())
|
||||||
|
.filter(Boolean);
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateModeUI() {
|
||||||
|
const mode = $("pyMode").value;
|
||||||
|
|
||||||
|
const rawOn = mode === "raw";
|
||||||
|
const genOn = mode === "generate";
|
||||||
|
|
||||||
|
$("pyRawFormatBox").style.display = rawOn ? "block" : "none";
|
||||||
|
$("pyAsJsBox").style.display = rawOn ? "block" : "none";
|
||||||
|
$("pyJsVarBox").style.display = rawOn ? "block" : "none";
|
||||||
|
|
||||||
|
$("pyAppTypeBox").style.display = genOn ? "block" : "none";
|
||||||
|
$("pyAppVariantBox").style.display = genOn ? "block" : "none";
|
||||||
|
|
||||||
|
if (genOn) {
|
||||||
|
updateVariantOptions();
|
||||||
|
} else {
|
||||||
|
updateRawJsFields();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateVariantOptions() {
|
||||||
|
const app = $("pyAppType").value;
|
||||||
|
const select = $("pyAppVariant");
|
||||||
|
const hint = $("variantHint");
|
||||||
|
|
||||||
|
select.innerHTML = "";
|
||||||
|
(variantsByApp[app] || []).forEach(v => {
|
||||||
|
const opt = document.createElement("option");
|
||||||
|
opt.value = v.value;
|
||||||
|
opt.textContent = v.label;
|
||||||
|
select.appendChild(opt);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (app === "haproxy") hint.textContent = "Recommended: haproxy + map";
|
||||||
|
else if (app === "apache") hint.textContent = "Recommended: apache + 24";
|
||||||
|
else if (app === "nginx") hint.textContent = "Recommended: nginx + geo or deny (avoid map)";
|
||||||
|
else hint.textContent = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateRawJsFields() {
|
||||||
|
const fmt = $("pyRawFormat").value;
|
||||||
|
const asJs = $("pyAsJs").value === "true";
|
||||||
|
|
||||||
|
const allowJs = fmt === "raw-cidr_json";
|
||||||
|
$("pyAsJs").disabled = !allowJs;
|
||||||
|
$("pyJsVar").disabled = !allowJs || !asJs;
|
||||||
|
|
||||||
|
if (!allowJs) {
|
||||||
|
$("pyAsJs").value = "false";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build a standalone Python client script for the currently selected form
// options and render it into the #pythonScriptOutput <pre> element.
// Reads all state through $(), so it must run after the DOM is ready.
function buildPythonScript() {
    const mode = $("pyMode").value;
    const countries = normalizeCountries($("pyCountries").value);
    const aggregate = $("pyAggregate").value === "true";
    const useCache = $("pyCache").value === "true";

    let endpoint = "";
    const payload = { countries, aggregate, use_cache: useCache };

    if (mode === "raw") {
        endpoint = "/api/generate/raw";
        payload.app_type = $("pyRawFormat").value;

        // The JS wrapper (as_js / js_var) only applies to the JSON raw format.
        if (payload.app_type === "raw-cidr_json") {
            const asJs = $("pyAsJs").value === "true";
            payload.as_js = asJs;
            if (asJs) payload.js_var = $("pyJsVar").value || "geoipBlocklist";
        }
    } else {
        // "generate" mode: downloadable app config (haproxy / apache / nginx).
        endpoint = "/api/generate";
        payload.app_type = $("pyAppType").value;
        payload.app_variant = $("pyAppVariant").value;
    }

    const payloadJson = JSON.stringify(payload, null, 4);

    // NOTE: the template literal below IS the generated Python source.
    // Do not add comments or whitespace inside it unless they should
    // appear verbatim in the emitted script.
    const script = `#!/usr/bin/env python3
import json
import re
import requests

BASE_URL = ${JSON.stringify(baseUrl)}
ENDPOINT = ${JSON.stringify(endpoint)}

payload_json = """${payloadJson}"""
payload = json.loads(payload_json)

resp = requests.post(BASE_URL + ENDPOINT, json=payload, timeout=120)

print("Status:", resp.status_code)
print("X-From-Cache:", resp.headers.get("X-From-Cache"))
print("X-Cache-Type:", resp.headers.get("X-Cache-Type"))
print("X-Generated-At:", resp.headers.get("X-Generated-At"))

ct = (resp.headers.get("Content-Type") or "").lower()

if resp.status_code >= 400:
    try:
        print(json.dumps(resp.json(), indent=2))
    except Exception:
        print(resp.text)
    raise SystemExit(1)

if "application/json" in ct:
    print(json.dumps(resp.json(), indent=2))
else:
    filename = "output"
    cd = resp.headers.get("Content-Disposition") or ""
    m = re.search(r'filename="?([^"]+)"?', cd)
    if m:
        filename = m.group(1)
    else:
        if "text/csv" in ct:
            filename += ".csv"
        elif "javascript" in ct:
            filename += ".js"
        elif "text/plain" in ct:
            filename += ".txt"
        else:
            filename += ".bin"

    with open(filename, "wb") as f:
        f.write(resp.content)

    print("Saved to:", filename)
`;

    $("pythonScriptOutput").textContent = script;
}
|
||||||
|
|
||||||
|
// Copy the generated Python script to the system clipboard.
async function copyPythonScript() {
    const output = $("pythonScriptOutput");
    const script = output.textContent || "";
    await navigator.clipboard.writeText(script);
}
|
||||||
|
|
||||||
|
// Wire up all generator-page event handlers, then render the initial
// state (mode-dependent UI plus the first generated script).
function bind() {
    const headerCopyBtn = document.getElementById("btnCopyPyTop");
    if (headerCopyBtn) headerCopyBtn.addEventListener("click", copyPythonScript);

    // "change" listeners, registered in the same order as before.
    const changeHandlers = [
        ["pyMode", updateModeUI],
        ["pyAppType", updateVariantOptions],
        ["pyRawFormat", updateRawJsFields],
        ["pyAsJs", updateRawJsFields],
    ];
    for (const [id, handler] of changeHandlers) {
        $(id).addEventListener("change", handler);
    }

    $("btnGenPy").addEventListener("click", () => {
        updateRawJsFields();
        buildPythonScript();
    });

    $("btnCopyPy").addEventListener("click", copyPythonScript);

    updateModeUI();
    buildPythonScript();
}
|
||||||
|
|
||||||
|
// Entry point: show the base URL and attach all handlers once the DOM
// has been fully parsed.
document.addEventListener("DOMContentLoaded", () => {
    setBaseUrl();
    bind();
});
|
||||||
1053
templates/api.html
1053
templates/api.html
File diff suppressed because it is too large
Load Diff
@@ -19,9 +19,26 @@
|
|||||||
{{ app_name }}
|
{{ app_name }}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</a>
|
</a>
|
||||||
<div>
|
<div class="d-flex align-items-center">
|
||||||
<a href="/" class="btn btn-sm btn-outline-secondary me-2">Home</a>
|
|
||||||
<a href="/api-docs" class="btn btn-sm btn-outline-primary">API Docs</a>
|
<!-- API status -->
|
||||||
|
<div class="me-3 d-flex align-items-center">
|
||||||
|
<span id="apiStatusDot" class="status-dot bg-secondary"></span>
|
||||||
|
<small id="apiStatusText" class="ms-2 text-muted">API</small>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<a href="/" class="btn btn-sm btn-outline-secondary me-2">
|
||||||
|
<i class="fas fa-home me-1"></i>Home
|
||||||
|
</a>
|
||||||
|
|
||||||
|
<a href="/api-docs" class="btn btn-sm btn-outline-primary me-2">
|
||||||
|
<i class="fas fa-book me-1"></i>API Docs
|
||||||
|
</a>
|
||||||
|
|
||||||
|
<a href="/generator" class="btn btn-sm btn-outline-dark">
|
||||||
|
<i class="fas fa-code me-1"></i>Script Generator
|
||||||
|
</a>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</nav>
|
</nav>
|
||||||
@@ -42,6 +59,7 @@
|
|||||||
|
|
||||||
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
|
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
|
||||||
<script src="{{ url_for('static', filename='js/app.js') }}?v={{ js_hash }}"></script>
|
<script src="{{ url_for('static', filename='js/app.js') }}?v={{ js_hash }}"></script>
|
||||||
|
<script src="{{ url_for('static', filename='js/base.js') }}?v={{ js_hash }}"></script>
|
||||||
{% block scripts %}{% endblock %}
|
{% block scripts %}{% endblock %}
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
62
templates/error.html
Normal file
62
templates/error.html
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block title %}{{ status }} - {{ title }}{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="container mt-5">
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-lg-8 mx-auto text-center">
|
||||||
|
|
||||||
|
<div class="mb-4">
|
||||||
|
<div class="display-4 fw-bold text-danger">{{ status }}</div>
|
||||||
|
<h3 class="mb-2">{{ title }}</h3>
|
||||||
|
<p class="text-muted mb-3">{{ message }}</p>
|
||||||
|
|
||||||
|
<div class="alert alert-light border text-start">
|
||||||
|
<div class="d-flex justify-content-between align-items-center flex-wrap gap-2">
|
||||||
|
<div>
|
||||||
|
<div class="small text-muted">Path</div>
|
||||||
|
<code>{{ path }}</code>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<div class="small text-muted">Hint</div>
|
||||||
|
{% if status == 405 %}
|
||||||
|
<span class="badge bg-warning text-dark">Check HTTP method</span>
|
||||||
|
{% elif status == 400 %}
|
||||||
|
<span class="badge bg-warning text-dark">Check request body</span>
|
||||||
|
{% elif status == 401 %}
|
||||||
|
<span class="badge bg-warning text-dark">Auth required</span>
|
||||||
|
{% elif status == 403 %}
|
||||||
|
<span class="badge bg-warning text-dark">Permission denied</span>
|
||||||
|
{% else %}
|
||||||
|
<span class="badge bg-secondary">Check URL</span>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="d-flex justify-content-center gap-2 flex-wrap">
|
||||||
|
<a href="/" class="btn btn-primary">
|
||||||
|
<i class="fas fa-home me-1"></i>Home
|
||||||
|
</a>
|
||||||
|
<a href="/api-docs" class="btn btn-outline-secondary">
|
||||||
|
<i class="fas fa-book me-1"></i>API Docs
|
||||||
|
</a>
|
||||||
|
<a href="/generator" class="btn btn-outline-dark">
|
||||||
|
<i class="fas fa-code me-1"></i>Script Generator
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="mt-4 small text-muted">
|
||||||
|
{% if path.startswith('/api/') %}
|
||||||
|
API endpoints return JSON for programmatic clients.
|
||||||
|
{% else %}
|
||||||
|
If you expected an API response, use <code>/api/...</code>.
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
144
templates/generator.html
Normal file
144
templates/generator.html
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block title %}Script Generator - {{ app_name }}{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="container mt-4">
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-lg-10 mx-auto">
|
||||||
|
|
||||||
|
<div class="mb-4">
|
||||||
|
<h2>Script Generator</h2>
|
||||||
|
<p class="text-muted mb-2">Generate ready-to-use integration scripts for this API.</p>
|
||||||
|
|
||||||
|
<div class="alert alert-info">
|
||||||
|
<strong>Base URL:</strong> <code id="baseUrl"></code>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="card mb-4">
|
||||||
|
<div class="card-header bg-dark text-white">
|
||||||
|
<strong><i class="fab fa-python me-2"></i>Python Generator</strong>
|
||||||
|
<span class="ms-2 small text-white-50">raw / generate</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="card-body">
|
||||||
|
|
||||||
|
<div class="row g-3">
|
||||||
|
|
||||||
|
<div class="col-md-4">
|
||||||
|
<label class="form-label">Mode</label>
|
||||||
|
<select id="pyMode" class="form-select form-select-sm">
|
||||||
|
<option value="raw">Raw blocklist (TXT / CSV / JSON / JS)</option>
|
||||||
|
<option value="generate">App config file (download)</option>
|
||||||
|
</select>
|
||||||
|
<div class="form-text">Preview endpoint is UI-only — not recommended for external automation.</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-4">
|
||||||
|
<label class="form-label">Countries (comma separated)</label>
|
||||||
|
<input type="text" id="pyCountries" class="form-control form-control-sm" value="PL">
|
||||||
|
<div class="form-text">Example: <code>PL</code> or <code>CN,RU</code></div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-2">
|
||||||
|
<label class="form-label">Aggregate</label>
|
||||||
|
<select id="pyAggregate" class="form-select form-select-sm">
|
||||||
|
<option value="true">true</option>
|
||||||
|
<option value="false">false</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-2">
|
||||||
|
<label class="form-label">Use cache</label>
|
||||||
|
<select id="pyCache" class="form-select form-select-sm">
|
||||||
|
<option value="true">true</option>
|
||||||
|
<option value="false">false</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- RAW options -->
|
||||||
|
<div class="col-md-4" id="pyRawFormatBox">
|
||||||
|
<label class="form-label">Raw format (app_type)</label>
|
||||||
|
<select id="pyRawFormat" class="form-select form-select-sm">
|
||||||
|
<option value="raw-cidr_txt">raw-cidr_txt (TXT)</option>
|
||||||
|
<option value="raw-cidr_csv">raw-cidr_csv (CSV)</option>
|
||||||
|
<option value="raw-cidr_json">raw-cidr_json (JSON)</option>
|
||||||
|
</select>
|
||||||
|
<div class="form-text">
|
||||||
|
JS wrapper works only with <code>raw-cidr_json</code>.
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-2" id="pyAsJsBox">
|
||||||
|
<label class="form-label">as_js</label>
|
||||||
|
<select id="pyAsJs" class="form-select form-select-sm">
|
||||||
|
<option value="false">false</option>
|
||||||
|
<option value="true">true</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-3" id="pyJsVarBox">
|
||||||
|
<label class="form-label">js_var</label>
|
||||||
|
<input type="text" id="pyJsVar" class="form-control form-control-sm" value="geoipBlocklist">
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- GENERATE options -->
|
||||||
|
<div class="col-md-4" id="pyAppTypeBox" style="display:none;">
|
||||||
|
<label class="form-label">app_type</label>
|
||||||
|
<select id="pyAppType" class="form-select form-select-sm">
|
||||||
|
<option value="haproxy">haproxy</option>
|
||||||
|
<option value="apache">apache</option>
|
||||||
|
<option value="nginx">nginx</option>
|
||||||
|
</select>
|
||||||
|
<div class="form-text">
|
||||||
|
Avoid <code>nginx</code> + <code>map</code> for production.
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-4" id="pyAppVariantBox" style="display:none;">
|
||||||
|
<label class="form-label">app_variant</label>
|
||||||
|
<select id="pyAppVariant" class="form-select form-select-sm"></select>
|
||||||
|
<div class="form-text" id="variantHint"></div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-12 mt-2">
|
||||||
|
<button class="btn btn-primary btn-sm" id="btnGenPy">
|
||||||
|
<i class="fas fa-code me-1"></i>Generate
|
||||||
|
</button>
|
||||||
|
<button class="btn btn-outline-secondary btn-sm ms-2" id="btnCopyPy">
|
||||||
|
<i class="fas fa-copy me-1"></i>Copy
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<hr>
|
||||||
|
|
||||||
|
<div class="code-wrap mt-2">
|
||||||
|
<div class="code-wrap-header">
|
||||||
|
<span><i class="fab fa-python me-2"></i>generated.py</span>
|
||||||
|
<button class="btn btn-sm btn-outline-light" type="button" id="btnCopyPyTop">
|
||||||
|
<i class="fas fa-copy me-1"></i>Copy
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<pre class="code-wrap-body"><code id="pythonScriptOutput"></code></pre>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="alert alert-secondary">
|
||||||
|
<strong>Tip:</strong> If you need structured output for integrations, use <code>/api/generate/raw</code>.
|
||||||
|
For ready-to-use app configs, use <code>/api/generate</code>.
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block scripts %}
|
||||||
|
<script src="{{ url_for('static', filename='js/generator.js') }}?v={{ js_hash }}"></script>
|
||||||
|
{% endblock %}
|
||||||
Reference in New Issue
Block a user