# ppf/config.ini.sample
# PPF Configuration
# Copy this file to config.ini and adjust as needed.
[common]
# Comma-separated list of Tor SOCKS5 proxy addresses
tor_hosts = 127.0.0.1:9050
# Network timeouts (seconds)
timeout_connect = 10
timeout_read = 15
# Enable cProfile profiling (writes to data/profile.stats)
profiling = 0

[watchd]
# Proxy validation daemon settings
# Database file for proxy storage
database = proxies.sqlite
# Check type: judges (recommended), ssl, irc, or head
# judges - HTTP judge servers that echo back request headers
# ssl - TLS handshake test (fast, no payload, verifies MITM)
# irc - IRC server connection test
# head - HTTP HEAD request test
checktype = judges
# SSL verification mode (1=always, 0=never, 2=random)
# When enabled, tests TLS handshake with certificate verification.
# Benefits:
# - Fast: TLS handshake completes quickly, no payload transfer
# - Meaningful: Verifies proxy supports encrypted connections
# - MITM detection: Certificate validation catches interception
# - Fallback: Works when judge servers are blocked/rate-limited
use_ssl = 1
# Thread configuration
threads = 50
min_threads = 5
# Timeout for proxy test connections (seconds)
timeout = 15
# Number of failures before proxy marked dead
max_fail = 5
# Check intervals (seconds)
checktime = 1800
perfail_checktime = 3600
# Minimum success rate to accept results (percentage)
outage_threshold = 4.0
# Batch size for database writes
submit_after = 200
# Days before removing dead proxies
stale_days = 30
# Seconds between stats reports (0 = disabled)
stats_interval = 300
# Re-test old (previously dead) proxies
oldies = 0
oldies_checktime = 43200
oldies_multi = 10
# Server list file for IRC/head checks
source_file = servers.txt
# Enable Tor circuit health monitoring
tor_safeguard = 1
# Debug output
debug = 0

[ppf]
# URL harvester settings
# Database file (usually same as watchd)
database = proxies.sqlite
# Number of URL fetcher threads
threads = 3
# Timeout for URL fetches (seconds)
timeout = 15
# HTTP connection retries
http_retries = 1
# Check intervals for URLs (seconds)
checktime = 3600
perfail_checktime = 3600
# Number of failures before URL marked dead
max_fail = 5
# Only extract URLs from same domain
extract_samedomain = 0
# Debug output
debug = 0

[scraper]
# Search engine scraper settings
# Enable scraper
enabled = 1
# Number of scraper threads
threads = 3
# Search engines: searx, duckduckgo, startpage, brave, ecosia,
# mojeek, qwant, yandex, github, gitlab, codeberg, gitea
engines = searx,duckduckgo,github
# Query mode: p=proxies, s=search, w=websites (combine: psw)
query = psw
# Max pages to fetch per query
max_pages = 5
# Rate limiting: backoff delays (seconds)
backoff_base = 30
backoff_max = 3600
fail_threshold = 2
# LibreTranslate for multilingual queries (optional)
libretranslate_enabled = 0
libretranslate_url = https://lt.mymx.me/translate
# Debug output
debug = 0

[httpd]
# Web dashboard settings
# Enable web dashboard
enabled = 1
# Listen address (use 0.0.0.0 for all interfaces)
listenip = 127.0.0.1
# Listen port
port = 8081