docs: update worker hosts to cassius, edge, sentinel
This commit is contained in:
126
CLAUDE.md
126
CLAUDE.md
@@ -7,9 +7,9 @@
|
|||||||
│ Host │ Role │ Notes
|
│ Host │ Role │ Notes
|
||||||
├──────────┼─────────────┼────────────────────────────────────────────────────────┤
|
├──────────┼─────────────┼────────────────────────────────────────────────────────┤
|
||||||
│ odin │ Master │ Scrapes proxy lists, verifies conflicts, port 8081
|
│ odin │ Master │ Scrapes proxy lists, verifies conflicts, port 8081
|
||||||
│ forge │ Worker │ Tests proxies, reports to master via WireGuard
|
│ cassius │ Worker │ Tests proxies, reports to master via WireGuard
|
||||||
│ hermes │ Worker │ Tests proxies, reports to master via WireGuard
|
│ edge │ Worker │ Tests proxies, reports to master via WireGuard
|
||||||
│ janus │ Worker │ Tests proxies, reports to master via WireGuard
|
│ sentinel │ Worker │ Tests proxies, reports to master via WireGuard
|
||||||
└──────────┴─────────────┴────────────────────────────────────────────────────────┘
|
└──────────┴─────────────┴────────────────────────────────────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -43,20 +43,21 @@ cd /opt/ansible && source venv/bin/activate
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Check worker status
|
# Check worker status
|
||||||
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible forge,hermes,janus -m shell -a "hostname"
|
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius,edge,sentinel -m shell -a "hostname"
|
||||||
|
|
||||||
# Check worker config
|
# Check worker config
|
||||||
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible forge,hermes,janus -m shell -a "grep -E 'threads|timeout|ssl' /home/podman/ppf/config.ini"
|
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius,edge,sentinel -m shell -a "grep -E 'threads|timeout|ssl' /home/podman/ppf/config.ini"
|
||||||
|
|
||||||
# Check worker logs
|
# Check worker logs (dynamic UID)
|
||||||
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible forge -m shell -a "sudo -u podman journalctl --user -u ppf-worker -n 20"
|
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius -m raw \
|
||||||
|
-a "uid=\$(id -u podman) && sudo -u podman XDG_RUNTIME_DIR=/run/user/\$uid podman logs --tail 20 ppf-worker"
|
||||||
|
|
||||||
# Modify config option
|
# Modify config option
|
||||||
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible forge,hermes,janus -m lineinfile -a "path=/home/podman/ppf/config.ini line='ssl_only = 1' insertafter='ssl_first'"
|
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius,edge,sentinel -m lineinfile -a "path=/home/podman/ppf/config.ini line='ssl_only = 1' insertafter='ssl_first'"
|
||||||
|
|
||||||
# Restart workers (different UIDs!)
|
# Restart workers (dynamic UID discovery)
|
||||||
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible janus,forge -m raw -a "sudo -u podman XDG_RUNTIME_DIR=/run/user/996 systemctl --user restart ppf-worker"
|
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius,edge,sentinel -m raw \
|
||||||
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible hermes -m raw -a "sudo -u podman XDG_RUNTIME_DIR=/run/user/1001 systemctl --user restart ppf-worker"
|
-a "uid=\$(id -u podman) && sudo -u podman XDG_RUNTIME_DIR=/run/user/\$uid podman restart ppf-worker"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Full Deployment Procedure
|
## Full Deployment Procedure
|
||||||
@@ -78,11 +79,11 @@ ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible odin -m synchronize \
|
|||||||
-a "src=/home/user/git/ppf/ dest=/home/podman/ppf/ rsync_opts='--include=*.py,--include=servers.txt,--exclude=*'"
|
-a "src=/home/user/git/ppf/ dest=/home/podman/ppf/ rsync_opts='--include=*.py,--include=servers.txt,--exclude=*'"
|
||||||
|
|
||||||
# Deploy to WORKERS (ppf/src/ subdirectory)
|
# Deploy to WORKERS (ppf/src/ subdirectory)
|
||||||
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible forge,hermes,janus -m synchronize \
|
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible cassius,edge,sentinel -m synchronize \
|
||||||
-a "src=/home/user/git/ppf/ dest=/home/podman/ppf/src/ rsync_opts='--include=*.py,--include=servers.txt,--exclude=*'"
|
-a "src=/home/user/git/ppf/ dest=/home/podman/ppf/src/ rsync_opts='--include=*.py,--include=servers.txt,--exclude=*'"
|
||||||
|
|
||||||
# CRITICAL: Fix ownership on ALL hosts (rsync uses ansible user, containers need podman)
|
# CRITICAL: Fix ownership on ALL hosts (rsync uses ansible user, containers need podman)
|
||||||
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible odin,forge,hermes,janus -m raw \
|
ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible odin,cassius,edge,sentinel -m raw \
|
||||||
-a "chown -R podman:podman /home/podman/ppf/"
|
-a "chown -R podman:podman /home/podman/ppf/"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -95,11 +96,9 @@ ANSIBLE_REMOTE_TMP=/tmp/.ansible ansible odin,forge,hermes,janus -m raw \
|
|||||||
ansible odin -m raw \
|
ansible odin -m raw \
|
||||||
-a "cd /tmp && XDG_RUNTIME_DIR=/run/user/1005 runuser -u podman -- podman restart ppf"
|
-a "cd /tmp && XDG_RUNTIME_DIR=/run/user/1005 runuser -u podman -- podman restart ppf"
|
||||||
|
|
||||||
# Restart WORKERS (note different UIDs)
|
# Restart WORKERS (dynamic UID discovery)
|
||||||
ansible janus,forge -m raw \
|
ansible cassius,edge,sentinel -m raw \
|
||||||
-a "sudo -u podman XDG_RUNTIME_DIR=/run/user/996 systemctl --user restart ppf-worker"
|
-a "uid=\$(id -u podman) && sudo -u podman XDG_RUNTIME_DIR=/run/user/\$uid podman restart ppf-worker"
|
||||||
ansible hermes -m raw \
|
|
||||||
-a "sudo -u podman XDG_RUNTIME_DIR=/run/user/1001 systemctl --user restart ppf-worker"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Step 4: Verify All Running
|
### Step 4: Verify All Running
|
||||||
@@ -109,11 +108,9 @@ ansible hermes -m raw \
|
|||||||
ansible odin -m raw \
|
ansible odin -m raw \
|
||||||
-a "cd /tmp && XDG_RUNTIME_DIR=/run/user/1005 runuser -u podman -- podman ps"
|
-a "cd /tmp && XDG_RUNTIME_DIR=/run/user/1005 runuser -u podman -- podman ps"
|
||||||
|
|
||||||
# Check workers
|
# Check workers (dynamic UID discovery)
|
||||||
ansible janus,forge -m raw \
|
ansible cassius,edge,sentinel -m raw \
|
||||||
-a "sudo -u podman XDG_RUNTIME_DIR=/run/user/996 systemctl --user is-active ppf-worker"
|
-a "uid=\$(id -u podman) && sudo -u podman XDG_RUNTIME_DIR=/run/user/\$uid podman ps --format '{{.Names}} {{.Status}}'"
|
||||||
ansible hermes -m raw \
|
|
||||||
-a "sudo -u podman XDG_RUNTIME_DIR=/run/user/1001 systemctl --user is-active ppf-worker"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Podman User IDs
|
## Podman User IDs
|
||||||
@@ -123,12 +120,14 @@ ansible hermes -m raw \
|
|||||||
│ Host │ UID │ XDG_RUNTIME_DIR
|
│ Host │ UID │ XDG_RUNTIME_DIR
|
||||||
├──────────┼───────┼─────────────────────────────┤
|
├──────────┼───────┼─────────────────────────────┤
|
||||||
│ odin │ 1005 │ /run/user/1005
|
│ odin │ 1005 │ /run/user/1005
|
||||||
│ hermes │ 1001 │ /run/user/1001
|
│ cassius │ 993 │ /run/user/993
|
||||||
│ janus │ 996 │ /run/user/996
|
│ edge │ 993 │ /run/user/993
|
||||||
│ forge │ 996 │ /run/user/996
|
│ sentinel │ 992 │ /run/user/992
|
||||||
└──────────┴───────┴─────────────────────────────┘
|
└──────────┴───────┴─────────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**Prefer dynamic UID discovery** (`uid=$(id -u podman)`) over hardcoded values.
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
### Odin config.ini
|
### Odin config.ini
|
||||||
@@ -186,41 +185,26 @@ batch_size = clamp(fair_share, min=100, max=1000)
|
|||||||
- Workers shuffle their batch locally to avoid testing same proxies simultaneously
|
- Workers shuffle their batch locally to avoid testing same proxies simultaneously
|
||||||
- Claims expire after 5 minutes if not completed
|
- Claims expire after 5 minutes if not completed
|
||||||
|
|
||||||
## Worker systemd Unit
|
## Worker Container
|
||||||
|
|
||||||
Located at `/home/podman/.config/systemd/user/ppf-worker.service`:
|
Workers run as podman containers with `--restart=unless-stopped`:
|
||||||
|
|
||||||
```ini
|
```bash
|
||||||
[Unit]
|
podman run -d --name ppf-worker --network=host --restart=unless-stopped \
|
||||||
Description=PPF Worker Container
|
-e PYTHONUNBUFFERED=1 \
|
||||||
After=network-online.target tor.service
|
-v /home/podman/ppf/src:/app:ro,Z \
|
||||||
|
-v /home/podman/ppf/data:/app/data:Z \
|
||||||
[Service]
|
-v /home/podman/ppf/config.ini:/app/config.ini:ro,Z \
|
||||||
Type=simple
|
-v /home/podman/ppf/servers.txt:/app/servers.txt:ro,Z \
|
||||||
Restart=on-failure
|
localhost/ppf-worker:latest \
|
||||||
RestartSec=10
|
python -u ppf.py --worker --server http://10.200.1.250:8081
|
||||||
WorkingDirectory=%h
|
|
||||||
ExecStartPre=-/usr/bin/podman stop -t 10 ppf-worker
|
|
||||||
ExecStartPre=-/usr/bin/podman rm -f ppf-worker
|
|
||||||
ExecStart=/usr/bin/podman run \
|
|
||||||
--name ppf-worker --rm --log-driver=journald --network=host \
|
|
||||||
-v %h/ppf/src:/app:ro \
|
|
||||||
-v %h/ppf/data:/app/data \
|
|
||||||
-v %h/ppf/config.ini:/app/config.ini:ro \
|
|
||||||
-e PYTHONUNBUFFERED=1 \
|
|
||||||
localhost/ppf-worker:latest \
|
|
||||||
python -u ppf.py --worker --server http://10.200.1.250:8081
|
|
||||||
ExecStop=/usr/bin/podman stop -t 10 ppf-worker
|
|
||||||
|
|
||||||
[Install]
|
|
||||||
WantedBy=default.target
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Rebuilding Images
|
## Rebuilding Images
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Workers - from ppf/ directory (Dockerfile copies from src/)
|
# Workers - from ppf/ directory (Dockerfile copies from src/)
|
||||||
ansible forge,hermes,janus -m raw \
|
ansible cassius,edge,sentinel -m raw \
|
||||||
-a "cd /home/podman/ppf && sudo -u podman podman build -t localhost/ppf-worker:latest ."
|
-a "cd /home/podman/ppf && sudo -u podman podman build -t localhost/ppf-worker:latest ."
|
||||||
|
|
||||||
# Odin - from ppf/ directory
|
# Odin - from ppf/ directory
|
||||||
@@ -231,14 +215,16 @@ ansible odin -m raw \
|
|||||||
## API Endpoints
|
## API Endpoints
|
||||||
|
|
||||||
```
|
```
|
||||||
/dashboard Web UI with live statistics
|
/dashboard Web UI with live statistics
|
||||||
/map Interactive world map
|
/map Interactive world map
|
||||||
/health Health check
|
/health Health check
|
||||||
/api/stats Runtime statistics (JSON)
|
/api/stats Runtime statistics (JSON)
|
||||||
/api/workers Connected worker status
|
/api/workers Connected worker status
|
||||||
/api/memory Memory profiling data
|
/api/countries Proxy counts by country
|
||||||
/api/countries Proxy counts by country
|
/api/claim-urls Claim URL batch for worker-driven fetching (GET)
|
||||||
/proxies Working proxies list
|
/api/report-urls Report URL fetch results (POST)
|
||||||
|
/api/report-proxies Report working proxies (POST)
|
||||||
|
/proxies Working proxies list
|
||||||
```
|
```
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
@@ -247,7 +233,7 @@ ansible odin -m raw \
|
|||||||
|
|
||||||
Workers need `servers.txt` in src/:
|
Workers need `servers.txt` in src/:
|
||||||
```bash
|
```bash
|
||||||
ansible forge,hermes,janus -m copy \
|
ansible cassius,edge,sentinel -m copy \
|
||||||
-a "src=/home/user/git/ppf/servers.txt dest=/home/podman/ppf/src/servers.txt owner=podman group=podman"
|
-a "src=/home/user/git/ppf/servers.txt dest=/home/podman/ppf/src/servers.txt owner=podman group=podman"
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -270,15 +256,17 @@ ansible odin -m raw -a "cd /tmp; sudo -u podman podman restart ppf"
|
|||||||
|
|
||||||
### Worker Keeps Crashing
|
### Worker Keeps Crashing
|
||||||
|
|
||||||
1. Check systemd status with correct UID
|
1. Check container status: `sudo -u podman podman ps -a`
|
||||||
2. Verify servers.txt exists in src/
|
2. Check logs: `sudo -u podman podman logs --tail 50 ppf-worker`
|
||||||
3. Check ownership
|
3. Verify servers.txt exists in src/
|
||||||
4. Run manually to see error:
|
4. Check ownership: `ls -la /home/podman/ppf/src/`
|
||||||
|
5. Run manually to see error:
|
||||||
```bash
|
```bash
|
||||||
sudo -u podman podman run --rm --network=host \
|
sudo -u podman podman run --rm --network=host \
|
||||||
-v /home/podman/ppf/src:/app:ro \
|
-v /home/podman/ppf/src:/app:ro,Z \
|
||||||
-v /home/podman/ppf/data:/app/data \
|
-v /home/podman/ppf/data:/app/data:Z \
|
||||||
-v /home/podman/ppf/config.ini:/app/config.ini:ro \
|
-v /home/podman/ppf/config.ini:/app/config.ini:ro,Z \
|
||||||
|
-v /home/podman/ppf/servers.txt:/app/servers.txt:ro,Z \
|
||||||
localhost/ppf-worker:latest \
|
localhost/ppf-worker:latest \
|
||||||
python -u ppf.py --worker --server http://10.200.1.250:8081
|
python -u ppf.py --worker --server http://10.200.1.250:8081
|
||||||
```
|
```
|
||||||
|
|||||||
Reference in New Issue
Block a user