docs: update worker hosts to cassius, edge, sentinel
CLAUDE.md
@@ -7,9 +7,9 @@
 │ Host     │ Role        │ Notes                                                  │
 ├──────────┼─────────────┼────────────────────────────────────────────────────────┤
 │ odin     │ Master      │ Scrapes proxy lists, verifies conflicts, port 8081     │
-│ forge    │ Worker      │ Tests proxies, reports to master via WireGuard         │
-│ hermes   │ Worker      │ Tests proxies, reports to master via WireGuard         │
-│ janus    │ Worker      │ Tests proxies, reports to master via WireGuard         │
+│ cassius  │ Worker      │ Tests proxies, reports to master via WireGuard         │
+│ edge     │ Worker      │ Tests proxies, reports to master via WireGuard         │
+│ sentinel │ Worker      │ Tests proxies, reports to master via WireGuard         │
 └──────────┴─────────────┴────────────────────────────────────────────────────────┘
 ```
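For orientation, a hypothetical Ansible inventory grouping consistent with this table; the real inventory layout and group names are not shown in this file:

```ini
; Hypothetical /etc/ansible/hosts grouping; group names are illustrative only.
[master]
odin

[workers]
cassius
edge
sentinel
```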
@@ -43,20 +43,21 @@ cd /opt/ansible && source venv/bin/activate
 
 ```bash
 # Check worker status
-ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible forge,hermes,janus -m shell -a "hostname"
+ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius,edge,sentinel -m shell -a "hostname"
 
 # Check worker config
-ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible forge,hermes,janus -m shell -a "grep -E 'threads|timeout|ssl' /home/podman/ppf/config.ini"
+ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius,edge,sentinel -m shell -a "grep -E 'threads|timeout|ssl' /home/podman/ppf/config.ini"
 
-# Check worker logs
-ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible forge -m shell -a "sudo -u podman journalctl --user -u ppf-worker -n 20"
+# Check worker logs (dynamic UID)
+ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius -m raw \
+  -a "uid=\$(id -u podman) && sudo -u podman XDG_RUNTIME_DIR=/run/user/\$uid podman logs --tail 20 ppf-worker"
 
 # Modify config option
-ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible forge,hermes,janus -m lineinfile -a "path=/home/podman/ppf/config.ini line='ssl_only = 1' insertafter='ssl_first'"
+ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius,edge,sentinel -m lineinfile -a "path=/home/podman/ppf/config.ini line='ssl_only = 1' insertafter='ssl_first'"
 
-# Restart workers (different UIDs!)
-ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible janus,forge -m raw -a "sudo -u podman XDG_RUNTIME_DIR=/run/user/996 systemctl --user restart ppf-worker"
-ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible hermes -m raw -a "sudo -u podman XDG_RUNTIME_DIR=/run/user/1001 systemctl --user restart ppf-worker"
+# Restart workers (dynamic UID discovery)
+ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius,edge,sentinel -m raw \
+  -a "uid=\$(id -u podman) && sudo -u podman XDG_RUNTIME_DIR=/run/user/\$uid podman restart ppf-worker"
 ```
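For context, a sketch of the `config.ini` fragment these commands read and edit. Only the key names `threads`, `timeout`, `ssl_first`, and `ssl_only` come from the commands above; the section name and values are assumptions:

```ini
; Hypothetical fragment of /home/podman/ppf/config.ini; the section name
; and the numeric values are assumptions, only the key names appear above.
[worker]
threads = 64
timeout = 10
ssl_first = 1
; inserted by the lineinfile command above:
ssl_only = 1
```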
 
 ## Full Deployment Procedure
@@ -78,11 +79,11 @@ ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible odin -m synchronize \
   -a "src=/home/user/git/ppf/ dest=/home/podman/ppf/ rsync_opts='--include=*.py,--include=servers.txt,--exclude=*'"
 
 # Deploy to WORKERS (ppf/src/ subdirectory)
-ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible forge,hermes,janus -m synchronize \
+ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius,edge,sentinel -m synchronize \
   -a "src=/home/user/git/ppf/ dest=/home/podman/ppf/src/ rsync_opts='--include=*.py,--include=servers.txt,--exclude=*'"
 
 # CRITICAL: Fix ownership on ALL hosts (rsync uses ansible user, containers need podman)
-ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible odin,forge,hermes,janus -m raw \
+ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible odin,cassius,edge,sentinel -m raw \
   -a "chown -R podman:podman /home/podman/ppf/"
 ```
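Roughly, the worker `synchronize` task above amounts to the following plain `rsync` call (target host and SSH user are assumptions). Note the filter order: both `--include` rules must precede the catch-all `--exclude=*`, or nothing is copied:

```bash
# Sketch only: approximate plain-rsync equivalent for a single worker.
# The podman@cassius target is an assumption.
rsync -av --include='*.py' --include='servers.txt' --exclude='*' \
  /home/user/git/ppf/ podman@cassius:/home/podman/ppf/src/
```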
@@ -95,11 +96,9 @@ ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible odin,forge,hermes,janus -m raw \
 ansible odin -m raw \
   -a "cd /tmp && XDG_RUNTIME_DIR=/run/user/1005 runuser -u podman -- podman restart ppf"
 
-# Restart WORKERS (note different UIDs)
-ansible janus,forge -m raw \
-  -a "sudo -u podman XDG_RUNTIME_DIR=/run/user/996 systemctl --user restart ppf-worker"
-ansible hermes -m raw \
-  -a "sudo -u podman XDG_RUNTIME_DIR=/run/user/1001 systemctl --user restart ppf-worker"
+# Restart WORKERS (dynamic UID discovery)
+ansible cassius,edge,sentinel -m raw \
+  -a "uid=\$(id -u podman) && sudo -u podman XDG_RUNTIME_DIR=/run/user/\$uid podman restart ppf-worker"
 ```
 
 ### Step 4: Verify All Running
@@ -109,11 +108,9 @@ ansible hermes -m raw \
 ansible odin -m raw \
   -a "cd /tmp && XDG_RUNTIME_DIR=/run/user/1005 runuser -u podman -- podman ps"
 
-# Check workers
-ansible janus,forge -m raw \
-  -a "sudo -u podman XDG_RUNTIME_DIR=/run/user/996 systemctl --user is-active ppf-worker"
-ansible hermes -m raw \
-  -a "sudo -u podman XDG_RUNTIME_DIR=/run/user/1001 systemctl --user is-active ppf-worker"
+# Check workers (dynamic UID discovery)
+ansible cassius,edge,sentinel -m raw \
+  -a "uid=\$(id -u podman) && sudo -u podman XDG_RUNTIME_DIR=/run/user/\$uid podman ps --format '{{.Names}} {{.Status}}'"
 ```
 
 ## Podman User IDs
@@ -123,12 +120,14 @@ ansible hermes -m raw \
 │ Host     │ UID   │ XDG_RUNTIME_DIR             │
 ├──────────┼───────┼─────────────────────────────┤
 │ odin     │ 1005  │ /run/user/1005              │
-│ hermes   │ 1001  │ /run/user/1001              │
-│ janus    │ 996   │ /run/user/996               │
-│ forge    │ 996   │ /run/user/996               │
+│ cassius  │ 993   │ /run/user/993               │
+│ edge     │ 993   │ /run/user/993               │
+│ sentinel │ 992   │ /run/user/992               │
 └──────────┴───────┴─────────────────────────────┘
 ```
 
+**Prefer dynamic UID discovery** (`uid=$(id -u podman)`) over hardcoded values.
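The pattern is the same for any per-host command; a minimal sketch, run on the target host:

```bash
# Resolve the podman user's UID at run time instead of hardcoding /run/user/<uid>.
uid=$(id -u podman)
sudo -u podman XDG_RUNTIME_DIR=/run/user/$uid podman ps
```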
 
 ## Configuration
 
 ### Odin config.ini
@@ -186,41 +185,26 @@ batch_size = clamp(fair_share, min=100, max=1000)
 - Workers shuffle their batch locally to avoid testing same proxies simultaneously
 - Claims expire after 5 minutes if not completed
 
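A minimal shell sketch of this claim rule, assuming `fair_share` is the queue length divided by the worker count; the real logic lives in `ppf.py`, and the file names here are illustrative:

```bash
# Clamp the fair share to [100, 1000] proxies per batch.
queue_len=12000; n_workers=3
fair_share=$(( queue_len / n_workers ))
batch_size=$(( fair_share < 100 ? 100 : fair_share > 1000 ? 1000 : fair_share ))
# Each worker shuffles its claimed slice locally (queue.txt/batch.txt are illustrative).
head -n "$batch_size" queue.txt | shuf > batch.txt
```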
-## Worker systemd Unit
+## Worker Container
 
-Located at `/home/podman/.config/systemd/user/ppf-worker.service`:
+Workers run as podman containers with `--restart=unless-stopped`:
 
-```ini
-[Unit]
-Description=PPF Worker Container
-After=network-online.target tor.service
-
-[Service]
-Type=simple
-Restart=on-failure
-RestartSec=10
-WorkingDirectory=%h
-ExecStartPre=-/usr/bin/podman stop -t 10 ppf-worker
-ExecStartPre=-/usr/bin/podman rm -f ppf-worker
-ExecStart=/usr/bin/podman run \
-  --name ppf-worker --rm --log-driver=journald --network=host \
-  -v %h/ppf/src:/app:ro \
-  -v %h/ppf/data:/app/data \
-  -v %h/ppf/config.ini:/app/config.ini:ro \
-  -e PYTHONUNBUFFERED=1 \
-  localhost/ppf-worker:latest \
-  python -u ppf.py --worker --server http://10.200.1.250:8081
-ExecStop=/usr/bin/podman stop -t 10 ppf-worker
-
-[Install]
-WantedBy=default.target
+```bash
+podman run -d --name ppf-worker --network=host --restart=unless-stopped \
+  -e PYTHONUNBUFFERED=1 \
+  -v /home/podman/ppf/src:/app:ro,Z \
+  -v /home/podman/ppf/data:/app/data:Z \
+  -v /home/podman/ppf/config.ini:/app/config.ini:ro,Z \
+  -v /home/podman/ppf/servers.txt:/app/servers.txt:ro,Z \
+  localhost/ppf-worker:latest \
+  python -u ppf.py --worker --server http://10.200.1.250:8081
 ```
 
 ## Rebuilding Images
 
 ```bash
 # Workers - from ppf/ directory (Dockerfile copies from src/)
-ansible forge,hermes,janus -m raw \
+ansible cassius,edge,sentinel -m raw \
   -a "cd /home/podman/ppf && sudo -u podman podman build -t localhost/ppf-worker:latest ."
 
 # Odin - from ppf/ directory
@@ -231,14 +215,16 @@ ansible odin -m raw \
 ## API Endpoints
 
 ```
-/dashboard           Web UI with live statistics
-/map                 Interactive world map
-/health              Health check
-/api/stats           Runtime statistics (JSON)
-/api/workers         Connected worker status
-/api/memory          Memory profiling data
-/api/countries       Proxy counts by country
-/proxies             Working proxies list
+/dashboard           Web UI with live statistics
+/map                 Interactive world map
+/health              Health check
+/api/stats           Runtime statistics (JSON)
+/api/workers         Connected worker status
+/api/countries       Proxy counts by country
+/api/claim-urls      Claim URL batch for worker-driven fetching (GET)
+/api/report-urls     Report URL fetch results (POST)
+/api/report-proxies  Report working proxies (POST)
+/proxies             Working proxies list
 ```
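For reference, a sketch of the worker-driven fetch round-trip with `curl` (the master address appears elsewhere in this file; the JSON payload shape is an assumption):

```bash
# Claim a batch of source URLs to fetch (GET).
curl -s http://10.200.1.250:8081/api/claim-urls
# Report results back (POST; the body shape shown is an assumption).
curl -s -X POST -H 'Content-Type: application/json' \
  -d '{"results": []}' http://10.200.1.250:8081/api/report-urls
```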
 
 ## Troubleshooting
@@ -247,7 +233,7 @@ ansible odin -m raw \
 
 Workers need `servers.txt` in src/:
 ```bash
-ansible forge,hermes,janus -m copy \
+ansible cassius,edge,sentinel -m copy \
   -a "src=/home/user/git/ppf/servers.txt dest=/home/podman/ppf/src/servers.txt owner=podman group=podman"
 ```
 
@@ -270,15 +256,17 @@ ansible odin -m raw -a "cd /tmp; sudo -u podman podman restart ppf"
 
 ### Worker Keeps Crashing
 
-1. Check systemd status with correct UID
-2. Verify servers.txt exists in src/
-3. Check ownership
-4. Run manually to see error:
+1. Check container status: `sudo -u podman podman ps -a`
+2. Check logs: `sudo -u podman podman logs --tail 50 ppf-worker`
+3. Verify servers.txt exists in src/
+4. Check ownership: `ls -la /home/podman/ppf/src/`
+5. Run manually to see error:
 ```bash
 sudo -u podman podman run --rm --network=host \
-  -v /home/podman/ppf/src:/app:ro \
-  -v /home/podman/ppf/data:/app/data \
-  -v /home/podman/ppf/config.ini:/app/config.ini:ro \
+  -v /home/podman/ppf/src:/app:ro,Z \
+  -v /home/podman/ppf/data:/app/data:Z \
+  -v /home/podman/ppf/config.ini:/app/config.ini:ro,Z \
+  -v /home/podman/ppf/servers.txt:/app/servers.txt:ro,Z \
   localhost/ppf-worker:latest \
   python -u ppf.py --worker --server http://10.200.1.250:8081
 ```