- Skip recording BT observations for addresses matching peer bt_mac - Prevents scanners from being stored as regular devices - Filters at database level, not just frontend display Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1220 lines
47 KiB
Python
1220 lines
47 KiB
Python
"""SQLite database for RF Mapper historical data and device tracking"""
|
|
|
|
import sqlite3
|
|
import json
|
|
from datetime import datetime, timedelta
|
|
from pathlib import Path
|
|
from dataclasses import dataclass
|
|
from typing import Optional
|
|
import threading
|
|
|
|
|
|
@dataclass
class DeviceStats:
    """Aggregate statistics for a single tracked device.

    Built by DeviceDatabase.get_device_stats from the device's master row
    plus AVG/MIN/MAX aggregates over its rssi_history rows.
    """
    device_id: str           # key into the devices table (MAC-style id)
    device_type: str         # 'wifi' or 'bluetooth'
    name: str                # display name: custom label, name, or SSID fallback
    manufacturer: str        # OUI-derived manufacturer ('' when unknown)
    first_seen: str          # ISO-format timestamp of first observation
    last_seen: str           # ISO-format timestamp of latest observation
    total_observations: int  # running count maintained on the devices row
    avg_rssi: float          # signal-strength aggregates in dBm
    min_rssi: int
    max_rssi: int
    avg_distance_m: float    # estimated-distance aggregates in meters
    min_distance_m: float
    max_distance_m: float
|
|
|
|
|
|
@dataclass
class RSSIObservation:
    """One rssi_history row, as returned by get_device_rssi_history."""
    timestamp: str               # ISO-format time of the reading
    rssi: int                    # signal strength in dBm
    distance_m: float            # estimated distance at that reading (meters)
    floor: Optional[int] = None  # floor where detected, when recorded
|
|
|
|
|
|
class DeviceDatabase:
|
|
"""SQLite database for device history and statistics"""
|
|
|
|
def __init__(self, db_path: Path | str):
|
|
self.db_path = Path(db_path)
|
|
self.db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
self._local = threading.local()
|
|
self._init_schema()
|
|
|
|
def _get_connection(self) -> sqlite3.Connection:
|
|
"""Get thread-local database connection"""
|
|
if not hasattr(self._local, 'conn') or self._local.conn is None:
|
|
self._local.conn = sqlite3.connect(str(self.db_path))
|
|
self._local.conn.row_factory = sqlite3.Row
|
|
return self._local.conn
|
|
|
|
def _init_schema(self):
|
|
"""Initialize database schema"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
# Devices table - master record for each unique device
|
|
cursor.execute("""
|
|
CREATE TABLE IF NOT EXISTS devices (
|
|
device_id TEXT PRIMARY KEY,
|
|
device_type TEXT NOT NULL, -- 'wifi' or 'bluetooth'
|
|
name TEXT,
|
|
ssid TEXT, -- For WiFi only
|
|
manufacturer TEXT,
|
|
device_class TEXT, -- For Bluetooth
|
|
bt_device_type TEXT, -- For Bluetooth
|
|
encryption TEXT, -- For WiFi
|
|
channel INTEGER, -- For WiFi
|
|
frequency INTEGER, -- For WiFi
|
|
first_seen TEXT NOT NULL,
|
|
last_seen TEXT NOT NULL,
|
|
total_observations INTEGER DEFAULT 0,
|
|
custom_label TEXT, -- User-assigned name
|
|
is_favorite INTEGER DEFAULT 0,
|
|
assigned_floor INTEGER, -- User-assigned floor
|
|
notes TEXT,
|
|
created_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
updated_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
)
|
|
""")
|
|
|
|
# RSSI observations - time series data
|
|
cursor.execute("""
|
|
CREATE TABLE IF NOT EXISTS rssi_history (
|
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
device_id TEXT NOT NULL,
|
|
timestamp TEXT NOT NULL,
|
|
rssi INTEGER NOT NULL,
|
|
distance_m REAL,
|
|
floor INTEGER,
|
|
scan_id TEXT,
|
|
FOREIGN KEY (device_id) REFERENCES devices(device_id)
|
|
)
|
|
""")
|
|
|
|
# Scans table - record of each scan
|
|
cursor.execute("""
|
|
CREATE TABLE IF NOT EXISTS scans (
|
|
scan_id TEXT PRIMARY KEY,
|
|
timestamp TEXT NOT NULL,
|
|
location_label TEXT,
|
|
lat REAL,
|
|
lon REAL,
|
|
wifi_count INTEGER DEFAULT 0,
|
|
bt_count INTEGER DEFAULT 0,
|
|
created_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
)
|
|
""")
|
|
|
|
# Device statistics - pre-computed for performance
|
|
cursor.execute("""
|
|
CREATE TABLE IF NOT EXISTS device_stats (
|
|
device_id TEXT PRIMARY KEY,
|
|
avg_rssi REAL,
|
|
min_rssi INTEGER,
|
|
max_rssi INTEGER,
|
|
avg_distance_m REAL,
|
|
min_distance_m REAL,
|
|
max_distance_m REAL,
|
|
appearance_count INTEGER DEFAULT 0,
|
|
last_computed TEXT,
|
|
FOREIGN KEY (device_id) REFERENCES devices(device_id)
|
|
)
|
|
""")
|
|
|
|
# Movement events - detected motion
|
|
cursor.execute("""
|
|
CREATE TABLE IF NOT EXISTS movement_events (
|
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
device_id TEXT NOT NULL,
|
|
timestamp TEXT NOT NULL,
|
|
rssi_delta INTEGER,
|
|
distance_delta_m REAL,
|
|
direction TEXT, -- 'approaching', 'receding', 'stationary'
|
|
velocity_m_s REAL,
|
|
FOREIGN KEY (device_id) REFERENCES devices(device_id)
|
|
)
|
|
""")
|
|
|
|
# Alerts table - for new device detection, absence alerts
|
|
cursor.execute("""
|
|
CREATE TABLE IF NOT EXISTS alerts (
|
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
alert_type TEXT NOT NULL, -- 'new_device', 'device_absent', 'rssi_threshold'
|
|
device_id TEXT,
|
|
timestamp TEXT NOT NULL,
|
|
message TEXT,
|
|
acknowledged INTEGER DEFAULT 0,
|
|
FOREIGN KEY (device_id) REFERENCES devices(device_id)
|
|
)
|
|
""")
|
|
|
|
# Create indexes for performance
|
|
cursor.execute("CREATE INDEX IF NOT EXISTS idx_rssi_device_time ON rssi_history(device_id, timestamp)")
|
|
cursor.execute("CREATE INDEX IF NOT EXISTS idx_rssi_timestamp ON rssi_history(timestamp)")
|
|
cursor.execute("CREATE INDEX IF NOT EXISTS idx_devices_type ON devices(device_type)")
|
|
cursor.execute("CREATE INDEX IF NOT EXISTS idx_devices_last_seen ON devices(last_seen)")
|
|
cursor.execute("CREATE INDEX IF NOT EXISTS idx_movement_device ON movement_events(device_id, timestamp)")
|
|
cursor.execute("CREATE INDEX IF NOT EXISTS idx_alerts_type ON alerts(alert_type, acknowledged)")
|
|
|
|
# Add assigned_floor column if it doesn't exist (migration for existing DBs)
|
|
try:
|
|
cursor.execute("ALTER TABLE devices ADD COLUMN assigned_floor INTEGER")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
# Add custom position columns for manual position override (migration)
|
|
try:
|
|
cursor.execute("ALTER TABLE devices ADD COLUMN custom_lat_offset REAL")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
try:
|
|
cursor.execute("ALTER TABLE devices ADD COLUMN custom_lon_offset REAL")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
# Add departure_notified column for HA integration (migration)
|
|
try:
|
|
cursor.execute("ALTER TABLE devices ADD COLUMN departure_notified INTEGER DEFAULT 0")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
# Add scanner_id column to scans table for multi-scanner support (migration)
|
|
try:
|
|
cursor.execute("ALTER TABLE scans ADD COLUMN scanner_id TEXT")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
# Add scanner_id column to rssi_history for multi-scanner support (migration)
|
|
try:
|
|
cursor.execute("ALTER TABLE rssi_history ADD COLUMN scanner_id TEXT")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
# Peers table - known scanner peers for sync
|
|
cursor.execute("""
|
|
CREATE TABLE IF NOT EXISTS peers (
|
|
scanner_id TEXT PRIMARY KEY,
|
|
name TEXT,
|
|
url TEXT NOT NULL,
|
|
floor INTEGER,
|
|
latitude REAL,
|
|
longitude REAL,
|
|
last_seen TEXT,
|
|
registered_at TEXT
|
|
)
|
|
""")
|
|
|
|
# Add bt_mac column to peers table if missing (for scanner BT filtering)
|
|
try:
|
|
cursor.execute("ALTER TABLE peers ADD COLUMN bt_mac TEXT")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
# Add notes column to devices table if missing (for sync)
|
|
try:
|
|
cursor.execute("ALTER TABLE devices ADD COLUMN notes TEXT")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
# Add source_scanner columns for peer sync (device positions relative to source scanner)
|
|
try:
|
|
cursor.execute("ALTER TABLE devices ADD COLUMN source_scanner_id TEXT")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
try:
|
|
cursor.execute("ALTER TABLE devices ADD COLUMN source_scanner_lat REAL")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
try:
|
|
cursor.execute("ALTER TABLE devices ADD COLUMN source_scanner_lon REAL")
|
|
except sqlite3.OperationalError:
|
|
pass # Column already exists
|
|
|
|
conn.commit()
|
|
|
|
def record_scan(self, scan_id: str, timestamp: str, location_label: str,
|
|
lat: float, lon: float, wifi_count: int, bt_count: int,
|
|
scanner_id: Optional[str] = None):
|
|
"""Record a scan event
|
|
|
|
Args:
|
|
scan_id: Unique identifier for this scan
|
|
timestamp: ISO timestamp of the scan
|
|
location_label: User-defined location label
|
|
lat: Latitude of scan location
|
|
lon: Longitude of scan location
|
|
wifi_count: Number of WiFi networks detected
|
|
bt_count: Number of Bluetooth devices detected
|
|
scanner_id: ID of the scanner that performed this scan
|
|
"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
INSERT OR REPLACE INTO scans (scan_id, timestamp, location_label, lat, lon, wifi_count, bt_count, scanner_id)
|
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
|
""", (scan_id, timestamp, location_label, lat, lon, wifi_count, bt_count, scanner_id))
|
|
|
|
conn.commit()
|
|
|
|
def record_wifi_observation(self, bssid: str, ssid: str, rssi: int, distance_m: float,
|
|
channel: int, frequency: int, encryption: str,
|
|
manufacturer: str, floor: Optional[int] = None,
|
|
scan_id: Optional[str] = None,
|
|
scanner_id: Optional[str] = None):
|
|
"""Record a WiFi network observation
|
|
|
|
Args:
|
|
bssid: MAC address of the WiFi network
|
|
ssid: Network name
|
|
rssi: Signal strength in dBm
|
|
distance_m: Estimated distance in meters
|
|
channel: WiFi channel
|
|
frequency: Frequency in MHz
|
|
encryption: Encryption type
|
|
manufacturer: Manufacturer from OUI lookup
|
|
floor: Floor where the device was detected
|
|
scan_id: ID of the scan this observation belongs to
|
|
scanner_id: ID of the scanner that detected this network
|
|
"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
timestamp = datetime.now().isoformat()
|
|
|
|
# Insert or update device (don't touch updated_at - that's for metadata changes only)
|
|
cursor.execute("""
|
|
INSERT INTO devices (device_id, device_type, name, ssid, manufacturer, encryption, channel, frequency, first_seen, last_seen, total_observations)
|
|
VALUES (?, 'wifi', ?, ?, ?, ?, ?, ?, ?, ?, 1)
|
|
ON CONFLICT(device_id) DO UPDATE SET
|
|
name = COALESCE(excluded.name, devices.name),
|
|
ssid = COALESCE(excluded.ssid, devices.ssid),
|
|
manufacturer = COALESCE(excluded.manufacturer, devices.manufacturer),
|
|
encryption = COALESCE(excluded.encryption, devices.encryption),
|
|
channel = COALESCE(excluded.channel, devices.channel),
|
|
frequency = COALESCE(excluded.frequency, devices.frequency),
|
|
last_seen = excluded.last_seen,
|
|
total_observations = devices.total_observations + 1
|
|
""", (bssid, ssid, ssid, manufacturer, encryption, channel, frequency, timestamp, timestamp))
|
|
|
|
# Insert RSSI observation
|
|
cursor.execute("""
|
|
INSERT INTO rssi_history (device_id, timestamp, rssi, distance_m, floor, scan_id, scanner_id)
|
|
VALUES (?, ?, ?, ?, ?, ?, ?)
|
|
""", (bssid, timestamp, rssi, distance_m, floor, scan_id, scanner_id))
|
|
|
|
conn.commit()
|
|
|
|
# Check if this is a new device
|
|
cursor.execute("SELECT total_observations FROM devices WHERE device_id = ?", (bssid,))
|
|
row = cursor.fetchone()
|
|
if row and row['total_observations'] == 1:
|
|
self._create_alert('new_device', bssid, f"New WiFi network detected: {ssid} ({manufacturer})")
|
|
|
|
def record_bluetooth_observation(self, address: str, name: str, rssi: int, distance_m: float,
|
|
device_class: str, device_type: str, manufacturer: str,
|
|
floor: Optional[int] = None, scan_id: Optional[str] = None,
|
|
scanner_id: Optional[str] = None):
|
|
"""Record a Bluetooth device observation
|
|
|
|
Args:
|
|
address: MAC address of the Bluetooth device
|
|
name: Device name
|
|
rssi: Signal strength in dBm
|
|
distance_m: Estimated distance in meters
|
|
device_class: Bluetooth device class
|
|
device_type: Inferred device type (Phone, Headphones, etc.)
|
|
manufacturer: Manufacturer from OUI lookup
|
|
floor: Floor where the device was detected
|
|
scan_id: ID of the scan this observation belongs to
|
|
scanner_id: ID of the scanner that detected this device
|
|
"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
timestamp = datetime.now().isoformat()
|
|
|
|
# Skip if this is a known scanner's BT MAC (don't record scanners as devices)
|
|
cursor.execute("SELECT 1 FROM peers WHERE UPPER(bt_mac) = UPPER(?)", (address,))
|
|
if cursor.fetchone():
|
|
return # Skip scanner device
|
|
|
|
# Get previous observation for movement detection
|
|
cursor.execute("""
|
|
SELECT rssi, distance_m, timestamp FROM rssi_history
|
|
WHERE device_id = ? ORDER BY timestamp DESC LIMIT 1
|
|
""", (address,))
|
|
prev = cursor.fetchone()
|
|
|
|
# Insert or update device (don't touch updated_at - that's for metadata changes only)
|
|
cursor.execute("""
|
|
INSERT INTO devices (device_id, device_type, name, manufacturer, device_class, bt_device_type, first_seen, last_seen, total_observations)
|
|
VALUES (?, 'bluetooth', ?, ?, ?, ?, ?, ?, 1)
|
|
ON CONFLICT(device_id) DO UPDATE SET
|
|
name = CASE WHEN excluded.name != '<unknown>' AND excluded.name != '' THEN excluded.name ELSE devices.name END,
|
|
manufacturer = COALESCE(NULLIF(excluded.manufacturer, ''), devices.manufacturer),
|
|
device_class = COALESCE(excluded.device_class, devices.device_class),
|
|
bt_device_type = COALESCE(excluded.bt_device_type, devices.bt_device_type),
|
|
last_seen = excluded.last_seen,
|
|
total_observations = devices.total_observations + 1
|
|
""", (address, name, manufacturer, device_class, device_type, timestamp, timestamp))
|
|
|
|
# Insert RSSI observation
|
|
cursor.execute("""
|
|
INSERT INTO rssi_history (device_id, timestamp, rssi, distance_m, floor, scan_id, scanner_id)
|
|
VALUES (?, ?, ?, ?, ?, ?, ?)
|
|
""", (address, timestamp, rssi, distance_m, floor, scan_id, scanner_id))
|
|
|
|
conn.commit()
|
|
|
|
# Movement detection
|
|
if prev:
|
|
rssi_delta = rssi - prev['rssi']
|
|
distance_delta = distance_m - prev['distance_m']
|
|
prev_time = datetime.fromisoformat(prev['timestamp'])
|
|
time_delta = (datetime.now() - prev_time).total_seconds()
|
|
|
|
if abs(distance_delta) > 0.5 and time_delta > 0: # More than 0.5m movement
|
|
velocity = distance_delta / time_delta if time_delta > 0 else 0
|
|
direction = 'approaching' if distance_delta < 0 else 'receding'
|
|
|
|
cursor.execute("""
|
|
INSERT INTO movement_events (device_id, timestamp, rssi_delta, distance_delta_m, direction, velocity_m_s)
|
|
VALUES (?, ?, ?, ?, ?, ?)
|
|
""", (address, timestamp, rssi_delta, distance_delta, direction, velocity))
|
|
conn.commit()
|
|
|
|
# Check if this is a new device
|
|
cursor.execute("SELECT total_observations FROM devices WHERE device_id = ?", (address,))
|
|
row = cursor.fetchone()
|
|
if row and row['total_observations'] == 1:
|
|
self._create_alert('new_device', address, f"New Bluetooth device detected: {name} ({manufacturer})")
|
|
|
|
def _create_alert(self, alert_type: str, device_id: str, message: str):
|
|
"""Create an alert"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
timestamp = datetime.now().isoformat()
|
|
|
|
cursor.execute("""
|
|
INSERT INTO alerts (alert_type, device_id, timestamp, message)
|
|
VALUES (?, ?, ?, ?)
|
|
""", (alert_type, device_id, timestamp, message))
|
|
|
|
conn.commit()
|
|
|
|
def get_device(self, device_id: str) -> Optional[dict]:
|
|
"""Get device details"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("SELECT * FROM devices WHERE device_id = ?", (device_id,))
|
|
row = cursor.fetchone()
|
|
return dict(row) if row else None
|
|
|
|
def get_all_devices(self, device_type: Optional[str] = None,
|
|
since: Optional[str] = None,
|
|
limit: int = 100) -> list[dict]:
|
|
"""Get all devices with optional filtering"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
query = "SELECT * FROM devices WHERE 1=1"
|
|
params = []
|
|
|
|
if device_type:
|
|
query += " AND device_type = ?"
|
|
params.append(device_type)
|
|
|
|
if since:
|
|
query += " AND last_seen >= ?"
|
|
params.append(since)
|
|
|
|
query += " ORDER BY last_seen DESC LIMIT ?"
|
|
params.append(limit)
|
|
|
|
cursor.execute(query, params)
|
|
return [dict(row) for row in cursor.fetchall()]
|
|
|
|
def get_device_rssi_history(self, device_id: str,
                            since: Optional[str] = None,
                            limit: int = 1000) -> list[RSSIObservation]:
    """Return RSSI observations for one device, newest first, optionally
    restricted to timestamps >= *since*."""
    sql = "SELECT timestamp, rssi, distance_m, floor FROM rssi_history WHERE device_id = ?"
    params: list = [device_id]
    if since:
        sql += " AND timestamp >= ?"
        params.append(since)
    sql += " ORDER BY timestamp DESC LIMIT ?"
    params.append(limit)

    rows = self._get_connection().execute(sql, params).fetchall()
    return [
        RSSIObservation(row['timestamp'], row['rssi'], row['distance_m'], row['floor'])
        for row in rows
    ]
|
|
|
|
def get_device_stats(self, device_id: str) -> Optional[DeviceStats]:
    """Return aggregate RSSI/distance statistics for one device.

    Returns None when the device is unknown. Aggregates are 0 when the
    device has no rssi_history rows.
    """
    conn = self._get_connection()

    device = conn.execute(
        "SELECT * FROM devices WHERE device_id = ?", (device_id,)
    ).fetchone()
    if not device:
        return None

    # Compute stats from RSSI history
    stats = conn.execute(
        """
        SELECT
            AVG(rssi) as avg_rssi,
            MIN(rssi) as min_rssi,
            MAX(rssi) as max_rssi,
            AVG(distance_m) as avg_distance_m,
            MIN(distance_m) as min_distance_m,
            MAX(distance_m) as max_distance_m
        FROM rssi_history WHERE device_id = ?
        """,
        (device_id,),
    ).fetchone()

    def rounded(value, digits):
        # Use an explicit None check: the old truthiness test collapsed a
        # legitimate aggregate of 0/0.0 (e.g. distance 0) to the default.
        return round(value, digits) if value is not None else 0

    return DeviceStats(
        device_id=device_id,
        device_type=device['device_type'],
        # Display-name precedence: user label > broadcast name > SSID > id.
        name=device['custom_label'] or device['name'] or device['ssid'] or device_id,
        manufacturer=device['manufacturer'] or '',
        first_seen=device['first_seen'],
        last_seen=device['last_seen'],
        total_observations=device['total_observations'],
        avg_rssi=rounded(stats['avg_rssi'], 1),
        min_rssi=stats['min_rssi'] if stats['min_rssi'] is not None else 0,
        max_rssi=stats['max_rssi'] if stats['max_rssi'] is not None else 0,
        avg_distance_m=rounded(stats['avg_distance_m'], 2),
        min_distance_m=rounded(stats['min_distance_m'], 2),
        max_distance_m=rounded(stats['max_distance_m'], 2),
    )
|
|
|
|
def get_device_multi_scanner_rssi(self, device_id: str, seconds: int = 60) -> list[dict]:
|
|
"""Get recent RSSI readings per scanner for a device.
|
|
|
|
Args:
|
|
device_id: The device MAC address
|
|
seconds: Time window in seconds (default 60)
|
|
|
|
Returns:
|
|
List of dicts with scanner_id, avg_rssi, sample_count, last_seen
|
|
"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
query = """
|
|
SELECT scanner_id,
|
|
AVG(rssi) as avg_rssi,
|
|
COUNT(*) as sample_count,
|
|
MAX(timestamp) as last_seen
|
|
FROM rssi_history
|
|
WHERE device_id = ?
|
|
AND scanner_id IS NOT NULL
|
|
AND timestamp >= datetime('now', '-' || ? || ' seconds')
|
|
GROUP BY scanner_id
|
|
"""
|
|
cursor.execute(query, (device_id, seconds))
|
|
return [dict(r) for r in cursor.fetchall()]
|
|
|
|
def get_devices_seen_by_multiple_scanners(self, seconds: int = 60) -> list[str]:
|
|
"""Get device IDs seen by 2+ scanners recently.
|
|
|
|
Args:
|
|
seconds: Time window in seconds (default 60)
|
|
|
|
Returns:
|
|
List of device IDs seen by multiple scanners
|
|
"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
query = """
|
|
SELECT device_id
|
|
FROM rssi_history
|
|
WHERE scanner_id IS NOT NULL
|
|
AND timestamp >= datetime('now', '-' || ? || ' seconds')
|
|
GROUP BY device_id
|
|
HAVING COUNT(DISTINCT scanner_id) >= 2
|
|
"""
|
|
cursor.execute(query, (seconds,))
|
|
return [r['device_id'] for r in cursor.fetchall()]
|
|
|
|
def get_movement_events(self, device_id: Optional[str] = None,
|
|
since: Optional[str] = None,
|
|
limit: int = 100) -> list[dict]:
|
|
"""Get movement events"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
query = "SELECT * FROM movement_events WHERE 1=1"
|
|
params = []
|
|
|
|
if device_id:
|
|
query += " AND device_id = ?"
|
|
params.append(device_id)
|
|
|
|
if since:
|
|
query += " AND timestamp >= ?"
|
|
params.append(since)
|
|
|
|
query += " ORDER BY timestamp DESC LIMIT ?"
|
|
params.append(limit)
|
|
|
|
cursor.execute(query, params)
|
|
return [dict(row) for row in cursor.fetchall()]
|
|
|
|
def get_alerts(self, acknowledged: Optional[bool] = None,
|
|
alert_type: Optional[str] = None,
|
|
limit: int = 50) -> list[dict]:
|
|
"""Get alerts"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
query = "SELECT * FROM alerts WHERE 1=1"
|
|
params = []
|
|
|
|
if acknowledged is not None:
|
|
query += " AND acknowledged = ?"
|
|
params.append(1 if acknowledged else 0)
|
|
|
|
if alert_type:
|
|
query += " AND alert_type = ?"
|
|
params.append(alert_type)
|
|
|
|
query += " ORDER BY timestamp DESC LIMIT ?"
|
|
params.append(limit)
|
|
|
|
cursor.execute(query, params)
|
|
return [dict(row) for row in cursor.fetchall()]
|
|
|
|
def acknowledge_alert(self, alert_id: int):
|
|
"""Mark an alert as acknowledged"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("UPDATE alerts SET acknowledged = 1 WHERE id = ?", (alert_id,))
|
|
conn.commit()
|
|
|
|
def set_device_label(self, device_id: str, label: str):
|
|
"""Set a custom label for a device"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
UPDATE devices SET custom_label = ?, updated_at = CURRENT_TIMESTAMP
|
|
WHERE device_id = ?
|
|
""", (label, device_id))
|
|
conn.commit()
|
|
|
|
def set_device_favorite(self, device_id: str, is_favorite: bool):
|
|
"""Mark a device as favorite"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
UPDATE devices SET is_favorite = ?, updated_at = CURRENT_TIMESTAMP
|
|
WHERE device_id = ?
|
|
""", (1 if is_favorite else 0, device_id))
|
|
conn.commit()
|
|
|
|
def set_device_floor(self, device_id: str, floor: Optional[int]):
|
|
"""Set assigned floor for a device"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
UPDATE devices SET assigned_floor = ?, updated_at = CURRENT_TIMESTAMP
|
|
WHERE device_id = ?
|
|
""", (floor, device_id))
|
|
conn.commit()
|
|
|
|
def get_device_floor(self, device_id: str) -> Optional[int]:
|
|
"""Get assigned floor for a device"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("SELECT assigned_floor FROM devices WHERE device_id = ?", (device_id,))
|
|
row = cursor.fetchone()
|
|
return row['assigned_floor'] if row else None
|
|
|
|
def get_all_device_floors(self) -> dict:
|
|
"""Get all device floor assignments as a dict"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("SELECT device_id, assigned_floor FROM devices WHERE assigned_floor IS NOT NULL")
|
|
return {row['device_id']: row['assigned_floor'] for row in cursor.fetchall()}
|
|
|
|
def get_all_device_sources(self) -> dict:
|
|
"""Get all device source scanner info as a dict.
|
|
|
|
Returns:
|
|
Dict mapping device_id to {scanner_id, lat, lon} or None if local
|
|
"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
SELECT device_id, source_scanner_id, source_scanner_lat, source_scanner_lon
|
|
FROM devices
|
|
WHERE source_scanner_id IS NOT NULL
|
|
""")
|
|
return {
|
|
row['device_id']: {
|
|
'scanner_id': row['source_scanner_id'],
|
|
'lat': row['source_scanner_lat'],
|
|
'lon': row['source_scanner_lon']
|
|
}
|
|
for row in cursor.fetchall()
|
|
}
|
|
|
|
def set_device_source(self, device_id: str, scanner_id: str,
|
|
scanner_lat: float, scanner_lon: float):
|
|
"""Set source scanner info for a device (where it was detected/positioned)."""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
UPDATE devices
|
|
SET source_scanner_id = ?, source_scanner_lat = ?, source_scanner_lon = ?
|
|
WHERE device_id = ?
|
|
""", (scanner_id, scanner_lat, scanner_lon, device_id))
|
|
conn.commit()
|
|
|
|
def set_device_position(self, device_id: str, lat_offset: float, lon_offset: float):
|
|
"""Set custom position offset for a device (relative to scanner position)"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
UPDATE devices SET custom_lat_offset = ?, custom_lon_offset = ?, updated_at = CURRENT_TIMESTAMP
|
|
WHERE device_id = ?
|
|
""", (lat_offset, lon_offset, device_id))
|
|
conn.commit()
|
|
|
|
def get_device_position(self, device_id: str) -> tuple | None:
|
|
"""Get custom position offset for a device, returns (lat_offset, lon_offset) or None"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute(
|
|
"SELECT custom_lat_offset, custom_lon_offset FROM devices WHERE device_id = ?",
|
|
(device_id,)
|
|
)
|
|
row = cursor.fetchone()
|
|
if row and row['custom_lat_offset'] is not None and row['custom_lon_offset'] is not None:
|
|
return (row['custom_lat_offset'], row['custom_lon_offset'])
|
|
return None
|
|
|
|
def get_all_device_positions(self) -> dict:
|
|
"""Get all device position offsets as a dict: {device_id: {lat_offset, lon_offset}}"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
SELECT device_id, custom_lat_offset, custom_lon_offset
|
|
FROM devices
|
|
WHERE custom_lat_offset IS NOT NULL AND custom_lon_offset IS NOT NULL
|
|
""")
|
|
return {
|
|
row['device_id']: {
|
|
'lat_offset': row['custom_lat_offset'],
|
|
'lon_offset': row['custom_lon_offset']
|
|
}
|
|
for row in cursor.fetchall()
|
|
}
|
|
|
|
def clear_device_position(self, device_id: str):
|
|
"""Clear custom position for a device (reset to RSSI-based)"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
UPDATE devices SET custom_lat_offset = NULL, custom_lon_offset = NULL, updated_at = CURRENT_TIMESTAMP
|
|
WHERE device_id = ?
|
|
""", (device_id,))
|
|
conn.commit()
|
|
|
|
def get_recently_departed(self, timeout_minutes: int) -> list[dict]:
|
|
"""Get devices not seen within timeout that haven't been notified.
|
|
|
|
Args:
|
|
timeout_minutes: Minutes since last_seen to consider departed
|
|
|
|
Returns:
|
|
List of device dicts that have departed but not yet notified
|
|
"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cutoff = (datetime.now() - timedelta(minutes=timeout_minutes)).isoformat()
|
|
cursor.execute("""
|
|
SELECT device_id, device_type, name, ssid, manufacturer, last_seen
|
|
FROM devices
|
|
WHERE last_seen < ? AND (departure_notified = 0 OR departure_notified IS NULL)
|
|
""", (cutoff,))
|
|
|
|
return [dict(row) for row in cursor.fetchall()]
|
|
|
|
def mark_departure_notified(self, device_id: str):
|
|
"""Mark device as notified about departure."""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
UPDATE devices SET departure_notified = 1, updated_at = CURRENT_TIMESTAMP
|
|
WHERE device_id = ?
|
|
""", (device_id,))
|
|
conn.commit()
|
|
|
|
def reset_departure_notified(self, device_id: str):
|
|
"""Reset departure notification flag when device returns."""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cursor.execute("""
|
|
UPDATE devices SET departure_notified = 0, updated_at = CURRENT_TIMESTAMP
|
|
WHERE device_id = ?
|
|
""", (device_id,))
|
|
conn.commit()
|
|
|
|
def get_recent_activity(self, hours: int = 24) -> dict:
|
|
"""Get activity summary for the last N hours"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
since = (datetime.now() - timedelta(hours=hours)).isoformat()
|
|
|
|
# Count active devices
|
|
cursor.execute("""
|
|
SELECT device_type, COUNT(*) as count
|
|
FROM devices WHERE last_seen >= ?
|
|
GROUP BY device_type
|
|
""", (since,))
|
|
active_counts = {row['device_type']: row['count'] for row in cursor.fetchall()}
|
|
|
|
# Count observations
|
|
cursor.execute("""
|
|
SELECT COUNT(*) as count FROM rssi_history WHERE timestamp >= ?
|
|
""", (since,))
|
|
observation_count = cursor.fetchone()['count']
|
|
|
|
# Count movement events
|
|
cursor.execute("""
|
|
SELECT COUNT(*) as count FROM movement_events WHERE timestamp >= ?
|
|
""", (since,))
|
|
movement_count = cursor.fetchone()['count']
|
|
|
|
# Count new devices
|
|
cursor.execute("""
|
|
SELECT COUNT(*) as count FROM devices WHERE first_seen >= ?
|
|
""", (since,))
|
|
new_device_count = cursor.fetchone()['count']
|
|
|
|
# Count scans
|
|
cursor.execute("""
|
|
SELECT COUNT(*) as count FROM scans WHERE timestamp >= ?
|
|
""", (since,))
|
|
scan_count = cursor.fetchone()['count']
|
|
|
|
return {
|
|
"period_hours": hours,
|
|
"since": since,
|
|
"active_wifi_devices": active_counts.get('wifi', 0),
|
|
"active_bt_devices": active_counts.get('bluetooth', 0),
|
|
"total_observations": observation_count,
|
|
"movement_events": movement_count,
|
|
"new_devices": new_device_count,
|
|
"scan_count": scan_count
|
|
}
|
|
|
|
def get_device_activity_pattern(self, device_id: str, days: int = 7) -> dict:
|
|
"""Get hourly activity pattern for a device over the last N days"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
since = (datetime.now() - timedelta(days=days)).isoformat()
|
|
|
|
# Count observations per hour of day
|
|
cursor.execute("""
|
|
SELECT
|
|
CAST(strftime('%H', timestamp) AS INTEGER) as hour,
|
|
COUNT(*) as count,
|
|
AVG(rssi) as avg_rssi
|
|
FROM rssi_history
|
|
WHERE device_id = ? AND timestamp >= ?
|
|
GROUP BY hour
|
|
ORDER BY hour
|
|
""", (device_id, since))
|
|
|
|
hourly = {row['hour']: {'count': row['count'], 'avg_rssi': round(row['avg_rssi'], 1)}
|
|
for row in cursor.fetchall()}
|
|
|
|
# Count observations per day of week (0=Monday, 6=Sunday)
|
|
cursor.execute("""
|
|
SELECT
|
|
CAST(strftime('%w', timestamp) AS INTEGER) as dow,
|
|
COUNT(*) as count
|
|
FROM rssi_history
|
|
WHERE device_id = ? AND timestamp >= ?
|
|
GROUP BY dow
|
|
ORDER BY dow
|
|
""", (device_id, since))
|
|
|
|
daily = {row['dow']: row['count'] for row in cursor.fetchall()}
|
|
|
|
return {
|
|
"device_id": device_id,
|
|
"period_days": days,
|
|
"hourly_pattern": hourly,
|
|
"daily_pattern": daily
|
|
}
|
|
|
|
def cleanup_old_data(self, retention_days: int = 30):
|
|
"""Remove data older than retention period"""
|
|
conn = self._get_connection()
|
|
cursor = conn.cursor()
|
|
|
|
cutoff = (datetime.now() - timedelta(days=retention_days)).isoformat()
|
|
|
|
# Delete old RSSI history (keep summary in devices table)
|
|
cursor.execute("DELETE FROM rssi_history WHERE timestamp < ?", (cutoff,))
|
|
|
|
# Delete old movement events
|
|
cursor.execute("DELETE FROM movement_events WHERE timestamp < ?", (cutoff,))
|
|
|
|
# Delete old acknowledged alerts
|
|
cursor.execute("DELETE FROM alerts WHERE timestamp < ? AND acknowledged = 1", (cutoff,))
|
|
|
|
# Delete old scans
|
|
cursor.execute("DELETE FROM scans WHERE timestamp < ?", (cutoff,))
|
|
|
|
conn.commit()
|
|
|
|
return {
|
|
"retention_days": retention_days,
|
|
"cutoff": cutoff,
|
|
"cleaned_at": datetime.now().isoformat()
|
|
}
|
|
|
|
def get_database_stats(self) -> dict:
    """Summarize the database: row counts per table and on-disk size.

    Returns:
        Dict with totals for devices, observations, scans and movement
        events, the number of unacknowledged alerts, and the SQLite
        file size in bytes and megabytes.
    """
    cursor = self._get_connection().cursor()

    def count_rows(sql: str) -> int:
        # Run a single-row COUNT(*) query and return the integer result.
        cursor.execute(sql)
        return cursor.fetchone()['count']

    device_total = count_rows("SELECT COUNT(*) as count FROM devices")
    observation_total = count_rows("SELECT COUNT(*) as count FROM rssi_history")
    scan_total = count_rows("SELECT COUNT(*) as count FROM scans")
    movement_total = count_rows("SELECT COUNT(*) as count FROM movement_events")
    pending_alerts = count_rows("SELECT COUNT(*) as count FROM alerts WHERE acknowledged = 0")

    # Size of the SQLite file on disk (0 if it hasn't been created yet)
    size_bytes = self.db_path.stat().st_size if self.db_path.exists() else 0

    return {
        "total_devices": device_total,
        "total_observations": observation_total,
        "total_scans": scan_total,
        "total_movement_events": movement_total,
        "unread_alerts": pending_alerts,
        "database_size_bytes": size_bytes,
        "database_size_mb": round(size_bytes / 1024 / 1024, 2)
    }
|
|
|
|
# ==================== Peer Sync Methods ====================
|
|
|
|
def register_peer(self, scanner_id: str, name: str, url: str,
                  floor: Optional[int] = None, latitude: Optional[float] = None,
                  longitude: Optional[float] = None, bt_mac: Optional[str] = None) -> bool:
    """Register a peer scanner, or refresh its record if already known.

    Args:
        scanner_id: Unique identifier for the peer scanner
        name: Human-readable name
        url: Base URL of the peer (e.g., http://192.168.129.9:5000)
        floor: Floor where peer scanner is located
        latitude: GPS latitude of peer
        longitude: GPS longitude of peer
        bt_mac: Bluetooth MAC address of the scanner (for filtering from device lists)

    Returns:
        True if newly registered, False if updated existing
    """
    conn = self._get_connection()
    cursor = conn.cursor()
    now = datetime.now().isoformat()

    # Probe first so the return value can distinguish new vs. updated
    cursor.execute("SELECT scanner_id FROM peers WHERE scanner_id = ?", (scanner_id,))
    is_new = cursor.fetchone() is None

    # Upsert: registered_at is only set on insert (the UPDATE clause omits
    # it), and bt_mac is kept via COALESCE when the caller passes None.
    cursor.execute("""
        INSERT INTO peers (scanner_id, name, url, floor, latitude, longitude, bt_mac, last_seen, registered_at)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(scanner_id) DO UPDATE SET
            name = excluded.name,
            url = excluded.url,
            floor = excluded.floor,
            latitude = excluded.latitude,
            longitude = excluded.longitude,
            bt_mac = COALESCE(excluded.bt_mac, peers.bt_mac),
            last_seen = excluded.last_seen
    """, (scanner_id, name, url, floor, latitude, longitude, bt_mac, now, now))

    conn.commit()
    return is_new
|
|
|
|
def get_peers(self) -> list[dict]:
    """Return every registered peer scanner, oldest registration first.

    Returns:
        List of peer dictionaries with scanner_id, name, url, floor, etc.
    """
    cursor = self._get_connection().cursor()
    cursor.execute("SELECT * FROM peers ORDER BY registered_at")
    rows = cursor.fetchall()
    return [dict(r) for r in rows]
|
|
|
|
def get_peer(self, scanner_id: str) -> Optional[dict]:
    """Look up a single peer by scanner_id; returns None when unknown."""
    cursor = self._get_connection().cursor()
    cursor.execute("SELECT * FROM peers WHERE scanner_id = ?", (scanner_id,))
    match = cursor.fetchone()
    if match is None:
        return None
    return dict(match)
|
|
|
|
def remove_peer(self, scanner_id: str) -> bool:
    """Delete a peer scanner record.

    Returns:
        True if a row was removed, False if no such peer existed.
    """
    conn = self._get_connection()
    cur = conn.cursor()
    cur.execute("DELETE FROM peers WHERE scanner_id = ?", (scanner_id,))
    conn.commit()
    # rowcount reflects how many rows the DELETE touched
    return cur.rowcount > 0
|
|
|
|
def update_peer_last_seen(self, scanner_id: str):
    """Touch a peer's last_seen timestamp (no-op if the peer is unknown)."""
    conn = self._get_connection()
    now = datetime.now().isoformat()
    conn.cursor().execute(
        "UPDATE peers SET last_seen = ? WHERE scanner_id = ?",
        (now, scanner_id)
    )
    conn.commit()
|
|
|
|
def get_devices_since(self, since: Optional[str] = None) -> list[dict]:
    """Get devices updated since a given timestamp for sync.

    Args:
        since: ISO timestamp. If None, returns all devices with sync-relevant data.

    Returns:
        List of device dicts with sync-relevant fields.
        Note: Position offsets are NOT synced as they are relative to each scanner's location.
        Source scanner info IS synced so receiving scanners can calculate positions correctly.
    """
    cursor = self._get_connection().cursor()

    # Only devices carrying user-assigned metadata are worth syncing.
    # Source scanner columns travel with the device so receivers can
    # compute positions relative to the original detector; position
    # offsets stay local to each scanner.
    sql = """
        SELECT device_id, device_type, name, ssid, manufacturer,
               custom_label, assigned_floor, is_favorite, notes, updated_at,
               source_scanner_id, source_scanner_lat, source_scanner_lon
        FROM devices
        WHERE (custom_label IS NOT NULL OR assigned_floor IS NOT NULL
               OR is_favorite = 1 OR notes IS NOT NULL)
    """
    args = []
    if since:
        sql += " AND updated_at > ?"
        args.append(since)
    sql += " ORDER BY updated_at"

    cursor.execute(sql, args)
    return [dict(r) for r in cursor.fetchall()]
|
|
|
|
def bulk_update_devices(self, devices: list[dict], source_scanner: str,
                        source_scanner_lat: Optional[float] = None,
                        source_scanner_lon: Optional[float] = None) -> int:
    """Bulk update device metadata from peer sync.

    Uses timestamp-based conflict resolution: newer wins.
    Only updates non-null fields from peer.
    Preserves source scanner info so device positions are calculated relative to
    the scanner that originally detected them.

    Args:
        devices: List of device dicts from peer
        source_scanner: Scanner ID that sent the update
        source_scanner_lat: Latitude of source scanner (for position calculation)
        source_scanner_lon: Longitude of source scanner (for position calculation)

    Returns:
        Number of devices updated
    """
    conn = self._get_connection()
    cursor = conn.cursor()
    updated_count = 0

    for dev in devices:
        device_id = dev.get("device_id")
        if not device_id:
            # Malformed entry from peer -- no key to match on, skip it
            continue

        # Get existing device
        cursor.execute(
            "SELECT updated_at, source_scanner_id FROM devices WHERE device_id = ?",
            (device_id,)
        )
        existing = cursor.fetchone()

        # Determine source scanner info for this device
        # Use device's original source if present, otherwise use the peer sending the data
        dev_source_id = dev.get("source_scanner_id") or source_scanner
        dev_source_lat = dev.get("source_scanner_lat") or source_scanner_lat
        dev_source_lon = dev.get("source_scanner_lon") or source_scanner_lon

        if existing:
            # Check timestamp - skip if local is newer
            # NOTE(review): comparison is lexicographic on ISO-8601 strings;
            # assumes both sides emit the same format/precision -- confirm
            local_updated = existing["updated_at"] or ""
            peer_updated = dev.get("updated_at", "")
            if local_updated > peer_updated:
                continue  # Local is newer, skip

            # Merge non-null fields from peer
            updates = []
            params = []

            if dev.get("custom_label") is not None:
                updates.append("custom_label = ?")
                params.append(dev["custom_label"])

            if dev.get("assigned_floor") is not None:
                updates.append("assigned_floor = ?")
                params.append(dev["assigned_floor"])

            # Note: position offsets are NOT synced - they're relative to each scanner

            if dev.get("is_favorite") is not None:
                updates.append("is_favorite = ?")
                params.append(1 if dev["is_favorite"] else 0)

            if dev.get("notes") is not None:
                updates.append("notes = ?")
                params.append(dev["notes"])

            # Update source scanner info if not already set locally
            # (preserve original source, don't overwrite with intermediate peer)
            if not existing["source_scanner_id"] and dev_source_id:
                updates.append("source_scanner_id = ?")
                params.append(dev_source_id)
                if dev_source_lat is not None:
                    updates.append("source_scanner_lat = ?")
                    params.append(dev_source_lat)
                if dev_source_lon is not None:
                    updates.append("source_scanner_lon = ?")
                    params.append(dev_source_lon)

            if updates:
                # Keep the peer's updated_at to preserve timeline
                updates.append("updated_at = ?")
                params.append(peer_updated)
                params.append(device_id)

                cursor.execute(
                    f"UPDATE devices SET {', '.join(updates)} WHERE device_id = ?",
                    params
                )
                if cursor.rowcount > 0:
                    updated_count += 1
        else:
            # Device doesn't exist locally - create it if it has useful metadata
            if dev.get("assigned_floor") is not None or dev.get("custom_label") or dev.get("is_favorite"):
                device_type = dev.get("device_type", "bluetooth")
                name = dev.get("name") or dev.get("ssid") or device_id
                now = datetime.now().isoformat()

                cursor.execute("""
                    INSERT INTO devices (device_id, device_type, name, ssid, manufacturer,
                                         custom_label, assigned_floor, is_favorite, notes,
                                         first_seen, last_seen, total_observations, updated_at,
                                         source_scanner_id, source_scanner_lat, source_scanner_lon)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?)
                """, (
                    device_id, device_type, name, dev.get("ssid"), dev.get("manufacturer"),
                    dev.get("custom_label"), dev.get("assigned_floor"),
                    1 if dev.get("is_favorite") else 0, dev.get("notes"),
                    now, now, dev.get("updated_at", now),
                    dev_source_id, dev_source_lat, dev_source_lon
                ))
                updated_count += 1

    conn.commit()
    return updated_count
|
|
|
|
def close(self):
    """Close this thread's database connection, if one was opened."""
    active = getattr(self._local, 'conn', None)
    if active:
        active.close()
        # Clear the slot so _get_connection will reconnect on next use
        self._local.conn = None
|
|
|
|
|
|
# Global database instance
# (module-level singleton: created lazily by get_database(), or replaced
# explicitly by init_database())
_db: DeviceDatabase | None = None
|
|
|
|
|
|
def get_database(db_path: Path | str | None = None) -> DeviceDatabase:
    """Return the global DeviceDatabase, creating it on first use.

    NOTE(review): db_path only takes effect on the very first call; once
    the singleton exists, later calls return it and ignore the argument.
    """
    global _db
    if _db is not None:
        return _db
    if db_path is None:
        # Fall back to the default on-disk location
        db_path = Path.home() / "git" / "rf-mapper" / "data" / "devices.db"
    _db = DeviceDatabase(db_path)
    return _db
|
|
|
|
|
|
def init_database(db_path: Path | str) -> DeviceDatabase:
    """Initialize the global database instance at *db_path*.

    Unconditionally replaces any existing global instance (the previous
    instance's connections are not closed here) and returns the new one.
    """
    global _db
    _db = DeviceDatabase(db_path)
    return _db
|