- Use type: ignore for cbor2/json Any returns in serialization/deadman - Fix callable→Callable in killswitch.py and usb_monitor.py - Add Ed25519PrivateKey assertion in CLI chain-wrap path - Allow None for RotationResult fingerprints - Annotate channel key as str in manager.py Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1073 lines
36 KiB
Python
1073 lines
36 KiB
Python
"""
|
|
SooSeF unified CLI.
|
|
|
|
Wraps Stegasoo and Verisoo CLIs as sub-command groups,
|
|
plus native SooSeF commands for init, fieldkit, keys, and serve.
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import logging
|
|
import threading
|
|
import time
|
|
from ipaddress import IPv4Address
|
|
from pathlib import Path
|
|
|
|
import click
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
@click.group()
@click.option(
    "--data-dir",
    envvar="SOOSEF_DATA_DIR",
    type=click.Path(path_type=Path),
    help="Override data directory (default: ~/.soosef)",
)
@click.option("--json", "json_output", is_flag=True, help="JSON output mode")
@click.version_option(package_name="soosef")
@click.pass_context
def main(ctx, data_dir, json_output):
    """SooSeF — Soo Security Fieldkit"""
    # Sub-commands read the JSON flag from ctx.obj["json"].
    ctx.ensure_object(dict)
    ctx.obj["json"] = json_output

    if data_dir:
        import soosef.paths as paths

        # Rebind the base directory before sub-commands import path constants.
        # NOTE(review): assumes soosef.paths derives its other paths lazily from
        # BASE_DIR (or that nothing imported them yet) — confirm, since any
        # constant computed at import time would still point at the default.
        paths.BASE_DIR = data_dir
|
|
|
|
|
|
# ── Init ────────────────────────────────────────────────────────────
|
|
|
|
|
|
@main.command()
@click.option("--no-identity", is_flag=True, help="Skip Ed25519 identity generation")
@click.option("--no-channel", is_flag=True, help="Skip channel key generation")
@click.pass_context
def init(ctx, no_identity, no_channel):
    """Initialize a new SooSeF instance — generate keys and create directory structure."""
    # Imports are deferred so `soosef --help` stays fast.
    from soosef.config import SoosefConfig
    from soosef.keystore.manager import KeystoreManager
    from soosef.paths import ensure_dirs

    click.echo("Initializing SooSeF...")
    ensure_dirs()
    click.echo(" Created directory structure")

    # Write a fresh default config file.
    config = SoosefConfig()
    config.save()
    click.echo(" Created default config")

    ks = KeystoreManager()

    # Key generation is idempotent: existing keys are never overwritten.
    if not no_identity:
        if ks.has_identity():
            click.echo(" Identity already exists — skipping")
        else:
            info = ks.generate_identity()
            click.echo(f" Generated Ed25519 identity: {info.fingerprint[:16]}...")

    if not no_channel:
        if ks.has_channel_key():
            click.echo(" Channel key already exists — skipping")
        else:
            key = ks.generate_channel_key()
            click.echo(f" Generated channel key: {key[:8]}...")

    click.echo("Done. Run 'soosef serve' to start the web UI.")
|
|
|
|
|
|
# ── Serve ───────────────────────────────────────────────────────────
|
|
|
|
|
|
@main.command()
@click.option("--host", default="127.0.0.1", help="Bind address")
@click.option("--port", default=5000, type=int, help="Port")
@click.option("--no-https", is_flag=True, help="Disable HTTPS")
@click.option("--debug", is_flag=True, help="Debug mode")
def serve(host, port, no_https, debug):
    """Start the SooSeF web UI.

    Loads the saved config, applies the CLI overrides, optionally generates
    a self-signed TLS certificate, starts the dead man's switch enforcement
    thread, and runs the Flask app (blocking).
    """
    from soosef.config import SoosefConfig

    config = SoosefConfig.load()
    config.host = host
    config.port = port
    if no_https:
        config.https_enabled = False

    from frontends.web.app import create_app

    app = create_app(config)

    ssl_context = None
    if config.https_enabled:
        from soosef.paths import CERTS_DIR, SSL_CERT, SSL_KEY

        CERTS_DIR.mkdir(parents=True, exist_ok=True)
        # Regenerate when EITHER half of the pair is missing: a cert without
        # its private key (or vice versa) would make app.run() fail at startup.
        # (The original checked only SSL_CERT.)
        if not SSL_CERT.exists() or not SSL_KEY.exists():
            click.echo("Generating self-signed SSL certificate...")
            _generate_self_signed_cert(SSL_CERT, SSL_KEY)
        ssl_context = (str(SSL_CERT), str(SSL_KEY))

    # Start the dead man's switch enforcement background thread.
    # The thread checks every 60 seconds and fires the killswitch if overdue.
    # It is a daemon thread — it dies automatically when the Flask process exits.
    # We always start it; the loop itself only acts when the switch is armed,
    # so it is safe to run even when the switch has never been configured.
    _start_deadman_thread(interval_seconds=60)

    click.echo(f"Starting SooSeF on {'https' if ssl_context else 'http'}://{host}:{port}")
    app.run(host=host, port=port, debug=debug, ssl_context=ssl_context)
|
|
|
|
|
|
def _deadman_enforcement_loop(interval_seconds: int = 60) -> None:
    """
    Background enforcement loop for the dead man's switch.

    Runs in a daemon thread started by ``serve``. Calls ``DeadmanSwitch.check()``
    every *interval_seconds*. If the switch fires, ``check()`` calls
    ``execute_purge`` internally and the process will lose its key material;
    the thread then exits because there is nothing left to guard.

    The loop re-evaluates ``is_armed()`` on every tick so it activates
    automatically if the switch is armed after the server starts.
    """
    from soosef.fieldkit.deadman import DeadmanSwitch

    dm = DeadmanSwitch()
    logger.debug("Dead man's switch enforcement loop started (interval=%ds)", interval_seconds)

    while True:
        time.sleep(interval_seconds)
        try:
            if dm.is_armed():
                # Snapshot should_fire() BEFORE check(): check() performs the
                # purge itself, so afterwards there is no state left to query.
                fired = dm.should_fire()
                dm.check()
                if fired:
                    # Killswitch has been triggered; no point continuing.
                    logger.warning("Dead man's switch fired — enforcement loop exiting")
                    return
        except Exception:
            # Broad catch is deliberate: a transient error must not kill the
            # enforcement thread. Logged with traceback, then retried next tick.
            logger.exception("Dead man's switch enforcement loop encountered an error")
|
|
|
|
|
|
def _start_deadman_thread(interval_seconds: int = 60) -> threading.Thread | None:
    """
    Start the dead man's switch enforcement daemon thread.

    Returns the thread object, or None if the thread could not be started.
    The thread is a daemon so it will not block process exit.
    """
    worker = threading.Thread(
        name="deadman-enforcement",
        target=_deadman_enforcement_loop,
        args=(interval_seconds,),
        daemon=True,
    )
    worker.start()
    logger.info("Dead man's switch enforcement thread started (interval=%ds)", interval_seconds)
    return worker
|
|
|
|
|
|
def _generate_self_signed_cert(cert_path: Path, key_path: Path) -> None:
    """Generate a self-signed certificate for development/local use.

    Writes an RSA-2048 private key (PKCS8 PEM, unencrypted, mode 0600) to
    *key_path* and a one-year certificate for CN "SooSeF Local" with SANs
    localhost / 127.0.0.1 to *cert_path*.
    """
    from datetime import UTC, datetime, timedelta

    from cryptography import x509
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    # Self-signed: subject and issuer are the same name.
    subject = issuer = x509.Name(
        [
            x509.NameAttribute(NameOID.COMMON_NAME, "SooSeF Local"),
        ]
    )
    cert = (
        x509.CertificateBuilder()
        .subject_name(subject)
        .issuer_name(issuer)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.now(UTC))
        .not_valid_after(datetime.now(UTC) + timedelta(days=365))
        .add_extension(
            # SANs cover both name and loopback IP so browsers accept either.
            x509.SubjectAlternativeName(
                [
                    x509.DNSName("localhost"),
                    x509.IPAddress(IPv4Address("127.0.0.1")),
                ]
            ),
            critical=False,
        )
        .sign(key, hashes.SHA256())
    )
    key_path.write_bytes(
        key.private_bytes(
            serialization.Encoding.PEM,
            serialization.PrivateFormat.PKCS8,
            serialization.NoEncryption(),
        )
    )
    # Restrict the private key to the owner before writing the public cert.
    key_path.chmod(0o600)
    cert_path.write_bytes(cert.public_bytes(serialization.Encoding.PEM))
|
|
|
|
|
|
# ── Stegasoo sub-commands ───────────────────────────────────────────
|
|
|
|
|
|
@main.group()
def stego():
    """Steganography operations (Stegasoo)."""
    pass


# Mount the Stegasoo CLI under 'soosef stego ...' when the package is
# installed; otherwise expose a single 'unavailable' stub that explains how
# to install it.
try:
    from stegasoo.cli import cli as stegasoo_cli

    # Re-register stegasoo commands under the 'stego' group
    for name, cmd in stegasoo_cli.commands.items():
        stego.add_command(cmd, name)
except ImportError:

    @stego.command()
    def unavailable():
        """Stegasoo is not installed."""
        click.echo("Error: stegasoo package not found. Install with: pip install stegasoo")
|
|
|
|
|
|
# ── Verisoo sub-commands ────────────────────────────────────────────
|
|
|
|
|
|
@main.group()
def attest():
    """Provenance attestation (Verisoo)."""
    pass


# Mount the Verisoo CLI under 'soosef attest ...' when the package is
# installed; otherwise expose a single 'unavailable' stub.
try:
    from verisoo.cli import main as verisoo_cli

    for name, cmd in verisoo_cli.commands.items():
        attest.add_command(cmd, name)
except ImportError:

    @attest.command()
    def unavailable():
        """Verisoo is not installed."""
        click.echo("Error: verisoo package not found. Install with: pip install verisoo")
|
|
|
|
|
|
def _attest_file(
    file_path: Path,
    private_key,
    storage,
    caption: str | None,
    auto_exif: bool = True,
) -> None:
    """Attest a single file and store the result.

    Shared by ``attest batch``. Raises on failure so the caller can decide
    whether to abort or continue.

    Args:
        file_path: Path to the image file to attest.
        private_key: Ed25519 private key loaded via verisoo.crypto.
        storage: verisoo LocalStorage instance.
        caption: Optional caption to embed in metadata.
        auto_exif: Whether to extract EXIF metadata from the image.
    """
    import hashlib

    from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
    from cryptography.hazmat.primitives.serialization import (
        Encoding,
        PublicFormat,
        load_pem_private_key,
    )
    from verisoo.attestation import create_attestation
    from verisoo.models import Identity

    from soosef.config import SoosefConfig
    from soosef.federation.chain import ChainStore
    from soosef.paths import CHAIN_DIR, IDENTITY_PRIVATE_KEY

    image_data = file_path.read_bytes()

    metadata: dict = {}
    if caption:
        metadata["caption"] = caption

    # create_attestation signs the image bytes; metadata=None when empty so
    # the record does not carry an empty dict.
    attestation = create_attestation(
        image_data=image_data,
        private_key=private_key,
        metadata=metadata if metadata else None,
        auto_exif=auto_exif,
    )

    storage.append_record(attestation.record)

    # Persist the local identity so verification can resolve the attestor name.
    pub_bytes = private_key.public_key().public_bytes(Encoding.Raw, PublicFormat.Raw)
    identity = Identity(
        public_key=pub_bytes,
        fingerprint=attestation.record.attestor_fingerprint,
        metadata={"name": "SooSeF Local Identity"},
    )
    try:
        storage.save_identity(identity)
    except Exception:
        pass  # Already exists — safe to ignore.

    # Wrap in the hash chain if enabled.
    config = SoosefConfig.load()
    if config.chain_enabled and config.chain_auto_wrap and IDENTITY_PRIVATE_KEY.exists():
        # Hash the canonical record bytes; fall back to str() for record
        # types without to_bytes().
        record_bytes = (
            attestation.record.to_bytes()
            if hasattr(attestation.record, "to_bytes")
            else str(attestation.record).encode()
        )
        content_hash = hashlib.sha256(record_bytes).digest()

        priv_pem = IDENTITY_PRIVATE_KEY.read_bytes()
        chain_key = load_pem_private_key(priv_pem, password=None)
        # Chain records must be signed with Ed25519; narrow the loaded key type.
        assert isinstance(chain_key, Ed25519PrivateKey)

        chain_metadata: dict = {}
        if caption:
            chain_metadata["caption"] = caption

        ChainStore(CHAIN_DIR).append(
            content_hash=content_hash,
            content_type="verisoo/attestation-v1",
            private_key=chain_key,
            metadata=chain_metadata,
        )
|
|
|
|
|
|
# ── Default extensions for batch attestation ──────────────────────────────────

# Extensions matched by default (compared case-insensitively, no leading dot).
_DEFAULT_EXTENSIONS: tuple[str, ...] = ("jpg", "jpeg", "png", "tiff", "tif", "webp")


@attest.command("batch")
@click.argument("directory", type=click.Path(exists=True, file_okay=False, path_type=Path))
@click.option("--caption", default=None, help="Shared caption to embed in every attestation.")
@click.option(
    "--extensions",
    default=",".join(_DEFAULT_EXTENSIONS),
    show_default=True,
    help="Comma-separated list of file extensions to include (without leading dot).",
)
@click.option(
    "--no-exif",
    is_flag=True,
    help="Disable automatic EXIF extraction.",
)
def batch(directory: Path, caption: str | None, extensions: str, no_exif: bool) -> None:
    """Attest all matching images in DIRECTORY.

    Iterates over every file whose extension matches --extensions, attests
    each one, and prints a running progress line. Failures are noted and
    reported in the final summary — the batch continues on individual errors.

    Example:

        soosef attest batch ./field-photos --caption "Kyiv, 2026-04-01"

        soosef attest batch ./docs --extensions pdf,png --no-exif
    """
    from verisoo.crypto import load_private_key
    from verisoo.storage import LocalStorage

    from soosef.paths import ATTESTATIONS_DIR, IDENTITY_PRIVATE_KEY

    # Validate identity.
    if not IDENTITY_PRIVATE_KEY.exists():
        click.echo(
            "Error: No identity configured. Run 'soosef init' first.",
            err=True,
        )
        raise SystemExit(1)

    private_key = load_private_key(IDENTITY_PRIVATE_KEY)
    storage = LocalStorage(base_path=ATTESTATIONS_DIR)
    auto_exif = not no_exif

    # Collect matching files. Extensions are normalized (trimmed, lowercased,
    # leading dot stripped) so "JPG", ".jpg" and "jpg" all match.
    exts = {e.strip().lower().lstrip(".") for e in extensions.split(",") if e.strip()}
    files: list[Path] = sorted(
        f for f in directory.iterdir() if f.is_file() and f.suffix.lstrip(".").lower() in exts
    )

    if not files:
        click.echo(
            f"No matching files found in {directory} (extensions: {', '.join(sorted(exts))})"
        )
        return

    total = len(files)
    failures: list[tuple[str, str]] = []

    for i, file_path in enumerate(files, start=1):
        click.echo(f"Attesting {i}/{total}: {file_path.name} ... ", nl=False)
        try:
            _attest_file(
                file_path=file_path,
                private_key=private_key,
                storage=storage,
                caption=caption,
                auto_exif=auto_exif,
            )
            click.echo("done")
        except Exception as exc:
            # Per-file failures are collected, not fatal; full traceback is
            # only available at debug log level.
            click.echo("FAILED")
            logger.debug("Attestation failed for %s: %s", file_path.name, exc, exc_info=True)
            failures.append((file_path.name, str(exc)))

    # Summary.
    succeeded = total - len(failures)
    click.echo()
    click.echo(f"{succeeded} file(s) attested, {len(failures)} failure(s).")
    if failures:
        click.echo("Failures:", err=True)
        for name, reason in failures:
            click.echo(f" {name}: {reason}", err=True)
        # Non-zero exit so scripted callers can detect partial failure.
        raise SystemExit(1)
|
|
|
|
|
|
# ── Fieldkit sub-commands ───────────────────────────────────────────
|
|
|
|
|
|
@main.group()
def fieldkit():
    """Field security features."""
    # Container group; sub-commands are registered below.
    pass
|
|
|
|
|
|
@fieldkit.command()
def status():
    """Show fieldkit status."""
    from soosef.fieldkit.deadman import DeadmanSwitch
    from soosef.keystore.manager import KeystoreManager

    ks = KeystoreManager()
    ks_status = ks.status()

    click.echo("=== SooSeF Fieldkit Status ===")
    # NOTE(review): fingerprint slicing assumes *_fingerprint is non-None
    # whenever the matching has_* flag is True — confirm KeystoreManager
    # guarantees this invariant.
    click.echo(
        f"Identity: {'Active (' + ks_status.identity_fingerprint[:16] + '...)' if ks_status.has_identity else 'None'}"
    )
    click.echo(
        f"Channel Key: {'Active (' + ks_status.channel_fingerprint[:16] + '...)' if ks_status.has_channel_key else 'None'}"
    )

    dm = DeadmanSwitch()
    dm_status = dm.status()
    click.echo(
        f"Dead Man: {'Armed (overdue!)' if dm_status['overdue'] else 'Armed' if dm_status['armed'] else 'Disarmed'}"
    )
|
|
|
|
|
|
@fieldkit.command()
@click.option("--confirm", required=True, help="Type CONFIRM-PURGE to confirm")
def purge(confirm):
    """Execute emergency purge — destroy all keys and data."""
    # Require the literal confirmation token; this is intentionally not a
    # yes/no prompt so it cannot be confirmed by accident.
    if confirm != "CONFIRM-PURGE":
        click.echo("Error: must pass --confirm CONFIRM-PURGE")
        raise SystemExit(1)

    from soosef.fieldkit.killswitch import PurgeScope, execute_purge

    click.echo("EXECUTING EMERGENCY PURGE...")
    result = execute_purge(PurgeScope.ALL, reason="cli")
    click.echo(f"Completed: {len(result.steps_completed)} steps")
    if result.steps_failed:
        click.echo(f"Failed: {len(result.steps_failed)} steps")
        for name, err in result.steps_failed:
            click.echo(f" - {name}: {err}")
|
|
|
|
|
|
@fieldkit.command()
def checkin():
    """Record a dead man's switch check-in."""
    from soosef.fieldkit.deadman import DeadmanSwitch

    switch = DeadmanSwitch()
    switch.checkin()
    click.echo("Check-in recorded.")
|
|
|
|
|
|
@fieldkit.command("check-deadman")
|
|
def check_deadman():
|
|
"""Run the dead man's switch check — fires killswitch if overdue.
|
|
|
|
Safe to call from cron or systemd. Exits with status 0 if the switch
|
|
is disarmed or not yet overdue. Exits with status 2 if the switch fired
|
|
and the killswitch was triggered (so cron/systemd can alert on it).
|
|
Exits with status 1 on unexpected errors.
|
|
"""
|
|
from soosef.fieldkit.deadman import DeadmanSwitch
|
|
|
|
dm = DeadmanSwitch()
|
|
|
|
if not dm.is_armed():
|
|
click.echo("Dead man's switch is not armed — nothing to do.")
|
|
return
|
|
|
|
fired = dm.should_fire()
|
|
try:
|
|
dm.check()
|
|
except Exception as exc:
|
|
click.echo(f"Error running dead man's check: {exc}", err=True)
|
|
raise SystemExit(1)
|
|
|
|
if fired:
|
|
click.echo(
|
|
"DEAD MAN'S SWITCH EXPIRED — killswitch triggered.",
|
|
err=True,
|
|
)
|
|
raise SystemExit(2)
|
|
|
|
s = dm.status()
|
|
if s["overdue"]:
|
|
click.echo(
|
|
f"Dead man's switch is OVERDUE (last check-in: {s['last_checkin']}) "
|
|
f"— grace period in effect, will fire soon.",
|
|
err=True,
|
|
)
|
|
else:
|
|
click.echo(f"Dead man's switch OK. Next due: {s.get('next_due', 'unknown')}")
|
|
|
|
|
|
# ── Fieldkit: geofence sub-commands ─────────────────────────────
|
|
|
|
|
|
@fieldkit.group()
def geofence():
    """Geofence configuration and checks."""
    # Container group; sub-commands are registered below.
    pass
|
|
|
|
|
|
@geofence.command("set")
|
|
@click.option("--lat", required=True, type=float, help="Fence center latitude")
|
|
@click.option("--lon", required=True, type=float, help="Fence center longitude")
|
|
@click.option("--radius", required=True, type=float, help="Fence radius in meters")
|
|
@click.option("--name", default="default", show_default=True, help="Human-readable fence name")
|
|
def geofence_set(lat, lon, radius, name):
|
|
"""Set the geofence — saves center and radius to ~/.soosef/fieldkit/geofence.json."""
|
|
from soosef.fieldkit.geofence import GeoCircle, save_fence
|
|
|
|
if radius <= 0:
|
|
click.echo("Error: --radius must be a positive number of meters.", err=True)
|
|
raise SystemExit(1)
|
|
if not (-90.0 <= lat <= 90.0):
|
|
click.echo("Error: --lat must be between -90 and 90.", err=True)
|
|
raise SystemExit(1)
|
|
if not (-180.0 <= lon <= 180.0):
|
|
click.echo("Error: --lon must be between -180 and 180.", err=True)
|
|
raise SystemExit(1)
|
|
|
|
fence = GeoCircle(lat=lat, lon=lon, radius_m=radius, name=name)
|
|
save_fence(fence)
|
|
click.echo(f"Geofence '{name}' set: center ({lat}, {lon}), radius {radius} m")
|
|
|
|
|
|
@geofence.command("check")
|
|
@click.option("--lat", required=True, type=float, help="Current latitude to check")
|
|
@click.option("--lon", required=True, type=float, help="Current longitude to check")
|
|
def geofence_check(lat, lon):
|
|
"""Check whether a point is inside the configured geofence.
|
|
|
|
Exit codes: 0 = inside fence, 1 = outside fence, 2 = no fence configured.
|
|
"""
|
|
from soosef.fieldkit.geofence import haversine_distance, is_inside, load_fence
|
|
|
|
fence = load_fence()
|
|
if fence is None:
|
|
click.echo("No geofence configured. Run 'soosef fieldkit geofence set' first.", err=True)
|
|
raise SystemExit(2)
|
|
|
|
inside = is_inside(fence, lat, lon)
|
|
distance = haversine_distance(fence.lat, fence.lon, lat, lon)
|
|
status = "INSIDE" if inside else "OUTSIDE"
|
|
click.echo(
|
|
f"{status} fence '{fence.name}' "
|
|
f"(distance: {distance:.1f} m, radius: {fence.radius_m} m)"
|
|
)
|
|
raise SystemExit(0 if inside else 1)
|
|
|
|
|
|
@geofence.command("clear")
|
|
def geofence_clear():
|
|
"""Remove the geofence configuration."""
|
|
from soosef.fieldkit.geofence import clear_fence
|
|
|
|
removed = clear_fence()
|
|
if removed:
|
|
click.echo("Geofence cleared.")
|
|
else:
|
|
click.echo("No geofence was configured.")
|
|
|
|
|
|
# ── Fieldkit: USB sub-commands ────────────────────────────────────
|
|
|
|
|
|
@fieldkit.group()
def usb():
    """USB device whitelist management."""
    # Container group; sub-commands are registered below.
    pass
|
|
|
|
|
|
def _enumerate_usb_devices() -> list[dict[str, str]]:
    """Return a list of currently connected USB devices.

    Each dict has keys: device_id (vid:pid), vendor, model. Duplicate
    vid:pid pairs (e.g. several interfaces of one physical device) are
    collapsed to a single entry.

    Raises:
        RuntimeError: If pyudev is not importable (Linux only).
    """
    try:
        import pyudev
    except ImportError as exc:
        # Chain the ImportError explicitly so the root cause stays visible.
        raise RuntimeError("pyudev not available — USB commands require Linux + pyudev") from exc

    context = pyudev.Context()
    devices: list[dict[str, str]] = []
    seen: set[str] = set()
    for device in context.list_devices(subsystem="usb"):
        vid = device.get("ID_VENDOR_ID", "")
        pid = device.get("ID_MODEL_ID", "")
        if not vid or not pid:
            # Entries without both IDs (e.g. interface nodes) are not
            # identifiable and are skipped.
            continue
        device_id = f"{vid}:{pid}"
        if device_id in seen:
            continue
        seen.add(device_id)
        devices.append(
            {
                "device_id": device_id,
                "vendor": device.get("ID_VENDOR", "unknown"),
                "model": device.get("ID_MODEL", "unknown"),
            }
        )
    return devices
|
|
|
|
|
|
@usb.command("snapshot")
|
|
def usb_snapshot():
|
|
"""Save currently connected USB devices as the whitelist.
|
|
|
|
Overwrites ~/.soosef/fieldkit/usb/whitelist.json with all USB devices
|
|
currently visible on the system. Run this once on a known-good machine.
|
|
"""
|
|
from soosef.fieldkit.usb_monitor import save_whitelist
|
|
|
|
try:
|
|
devices = _enumerate_usb_devices()
|
|
except RuntimeError as exc:
|
|
click.echo(f"Error: {exc}", err=True)
|
|
raise SystemExit(1)
|
|
|
|
device_ids = {d["device_id"] for d in devices}
|
|
save_whitelist(device_ids)
|
|
|
|
click.echo(f"Saved {len(device_ids)} device(s) to USB whitelist:")
|
|
for d in sorted(devices, key=lambda x: x["device_id"]):
|
|
click.echo(f" {d['device_id']} {d['vendor']} {d['model']}")
|
|
|
|
|
|
@usb.command("check")
|
|
def usb_check():
|
|
"""Compare connected USB devices against the whitelist.
|
|
|
|
Exit codes: 0 = all devices known, 1 = unknown device(s) detected,
|
|
2 = no whitelist configured (run 'soosef fieldkit usb snapshot' first).
|
|
"""
|
|
from soosef.fieldkit.usb_monitor import load_whitelist
|
|
|
|
whitelist = load_whitelist()
|
|
if not whitelist:
|
|
from soosef.paths import USB_WHITELIST
|
|
|
|
if not USB_WHITELIST.exists():
|
|
click.echo(
|
|
"No USB whitelist found. Run 'soosef fieldkit usb snapshot' first.", err=True
|
|
)
|
|
raise SystemExit(2)
|
|
|
|
try:
|
|
devices = _enumerate_usb_devices()
|
|
except RuntimeError as exc:
|
|
click.echo(f"Error: {exc}", err=True)
|
|
raise SystemExit(1)
|
|
|
|
unknown = [d for d in devices if d["device_id"] not in whitelist]
|
|
|
|
if not unknown:
|
|
click.echo(f"All {len(devices)} connected device(s) are whitelisted.")
|
|
raise SystemExit(0)
|
|
|
|
click.echo(f"WARNING: {len(unknown)} unknown device(s) detected:", err=True)
|
|
for d in unknown:
|
|
click.echo(f" {d['device_id']} {d['vendor']} {d['model']}", err=True)
|
|
raise SystemExit(1)
|
|
|
|
|
|
# ── Keys sub-commands ───────────────────────────────────────────────
|
|
|
|
|
|
@main.group()
def keys():
    """Key management."""
    # Container group; sub-commands are registered below.
    pass
|
|
|
|
|
|
@keys.command()
def show():
    """Show all key status."""
    from soosef.keystore.manager import KeystoreManager

    key_status = KeystoreManager().status()
    click.echo(f"Identity: {key_status.identity_fingerprint or 'Not configured'}")
    click.echo(f"Channel Key: {key_status.channel_fingerprint or 'Not configured'}")
|
|
|
|
|
|
@keys.command("export")
|
|
@click.argument("output", type=click.Path(path_type=Path))
|
|
@click.option("--password", prompt=True, hide_input=True, confirmation_prompt=True)
|
|
def export_keys(output, password):
|
|
"""Export all keys to an encrypted bundle file."""
|
|
from soosef.keystore.export import export_bundle
|
|
from soosef.paths import CHANNEL_KEY_FILE, IDENTITY_DIR
|
|
|
|
export_bundle(IDENTITY_DIR, CHANNEL_KEY_FILE, output, password.encode())
|
|
click.echo(f"Key bundle exported to: {output}")
|
|
|
|
|
|
@keys.command("import")
|
|
@click.argument("bundle", type=click.Path(exists=True, path_type=Path))
|
|
@click.option("--password", prompt=True, hide_input=True)
|
|
def import_keys(bundle, password):
|
|
"""Import keys from an encrypted bundle file."""
|
|
from soosef.keystore.export import import_bundle
|
|
from soosef.paths import CHANNEL_KEY_FILE, IDENTITY_DIR
|
|
|
|
imported = import_bundle(bundle, IDENTITY_DIR, CHANNEL_KEY_FILE, password.encode())
|
|
click.echo(f"Imported: {', '.join(imported.keys())}")
|
|
|
|
|
|
@keys.command("rotate-identity")
|
|
@click.confirmation_option(
|
|
prompt="This will archive the current identity and generate a new keypair. Continue?"
|
|
)
|
|
def rotate_identity():
|
|
"""Rotate the Ed25519 identity keypair — archive old, generate new.
|
|
|
|
The current private and public key are preserved in a timestamped
|
|
archive directory under ~/.soosef/identity/archived/ so that
|
|
previously signed attestations can still be verified with the old key.
|
|
|
|
After rotation, notify all collaborators of the new fingerprint so
|
|
they can update their trusted-key lists.
|
|
"""
|
|
from soosef.exceptions import KeystoreError
|
|
from soosef.keystore.manager import KeystoreManager
|
|
|
|
ks = KeystoreManager()
|
|
try:
|
|
result = ks.rotate_identity()
|
|
except KeystoreError as exc:
|
|
click.echo(f"Error: {exc}", err=True)
|
|
raise SystemExit(1)
|
|
|
|
click.echo("Identity rotated successfully.")
|
|
click.echo(f" Old fingerprint: {result.old_fingerprint}")
|
|
click.echo(f" New fingerprint: {result.new_fingerprint}")
|
|
click.echo(f" Archive: {result.archive_path}")
|
|
click.echo()
|
|
click.echo(
|
|
"IMPORTANT: Notify all collaborators of your new fingerprint so they can "
|
|
"update their trusted-key lists. Attestations signed with the old key "
|
|
"remain verifiable using the archived public key."
|
|
)
|
|
|
|
|
|
@keys.command("rotate-channel")
|
|
@click.confirmation_option(
|
|
prompt="This will archive the current channel key and generate a new one. Continue?"
|
|
)
|
|
def rotate_channel():
|
|
"""Rotate the Stegasoo channel key — archive old, generate new.
|
|
|
|
The current channel key is preserved in a timestamped archive directory
|
|
under ~/.soosef/stegasoo/archived/ before the new key is generated.
|
|
|
|
After rotation, all parties sharing this channel must receive the new
|
|
key out-of-band before they can decode new messages.
|
|
"""
|
|
from soosef.exceptions import KeystoreError
|
|
from soosef.keystore.manager import KeystoreManager
|
|
|
|
ks = KeystoreManager()
|
|
try:
|
|
result = ks.rotate_channel_key()
|
|
except KeystoreError as exc:
|
|
click.echo(f"Error: {exc}", err=True)
|
|
raise SystemExit(1)
|
|
|
|
click.echo("Channel key rotated successfully.")
|
|
click.echo(f" Old fingerprint: {result.old_fingerprint}")
|
|
click.echo(f" New fingerprint: {result.new_fingerprint}")
|
|
click.echo(f" Archive: {result.archive_path}")
|
|
click.echo()
|
|
click.echo(
|
|
"IMPORTANT: Distribute the new channel key to all channel participants "
|
|
"out-of-band. Messages encoded with the old key cannot be decoded "
|
|
"with the new one."
|
|
)
|
|
|
|
|
|
# ── Chain sub-commands ─────────────────────────────────────────────
|
|
|
|
|
|
@main.group()
def chain():
    """Attestation hash chain operations."""
    # Container group; sub-commands are registered below.
    pass
|
|
|
|
|
|
@chain.command("status")
|
|
@click.pass_context
|
|
def chain_status(ctx):
|
|
"""Show chain status — head index, chain ID, record count."""
|
|
from soosef.federation.chain import ChainStore
|
|
from soosef.paths import CHAIN_DIR
|
|
|
|
store = ChainStore(CHAIN_DIR)
|
|
state = store.state()
|
|
|
|
if state is None:
|
|
click.echo("Chain is empty — no records yet.")
|
|
click.echo("Attest an image or run 'soosef chain backfill' to populate.")
|
|
return
|
|
|
|
json_out = ctx.obj.get("json", False)
|
|
if json_out:
|
|
import json
|
|
|
|
click.echo(
|
|
json.dumps(
|
|
{
|
|
"chain_id": state.chain_id.hex(),
|
|
"head_index": state.head_index,
|
|
"head_hash": state.head_hash.hex(),
|
|
"record_count": state.record_count,
|
|
"created_at": state.created_at,
|
|
"last_append_at": state.last_append_at,
|
|
}
|
|
)
|
|
)
|
|
else:
|
|
click.echo("=== Attestation Chain ===")
|
|
click.echo(f"Chain ID: {state.chain_id.hex()[:32]}...")
|
|
click.echo(f"Records: {state.record_count}")
|
|
click.echo(f"Head index: {state.head_index}")
|
|
click.echo(f"Head hash: {state.head_hash.hex()[:32]}...")
|
|
click.echo(f"Created: {_format_us_timestamp(state.created_at)}")
|
|
click.echo(f"Last append: {_format_us_timestamp(state.last_append_at)}")
|
|
|
|
|
|
@chain.command()
def verify():
    """Verify chain integrity — check all hashes and signatures."""
    from soosef.federation.chain import ChainStore
    from soosef.paths import CHAIN_DIR

    store = ChainStore(CHAIN_DIR)
    state = store.state()

    if state is None:
        click.echo("Chain is empty — nothing to verify.")
        return

    click.echo(f"Verifying {state.record_count} records...")
    try:
        store.verify_chain()
        click.echo("Chain integrity OK — all hashes and signatures valid.")
    except Exception as e:
        # Any failure from verify_chain() is reported as a violation;
        # exit 1 so scripted callers can alert on it.
        click.echo(f"INTEGRITY VIOLATION: {e}", err=True)
        raise SystemExit(1)
|
|
|
|
|
|
@chain.command("show")
|
|
@click.argument("index", type=int)
|
|
@click.pass_context
|
|
def chain_show(ctx, index):
|
|
"""Show a specific chain record by index."""
|
|
from soosef.exceptions import ChainError
|
|
from soosef.federation.chain import ChainStore
|
|
from soosef.federation.serialization import compute_record_hash
|
|
from soosef.paths import CHAIN_DIR
|
|
|
|
store = ChainStore(CHAIN_DIR)
|
|
try:
|
|
record = store.get(index)
|
|
except ChainError as e:
|
|
click.echo(f"Error: {e}", err=True)
|
|
raise SystemExit(1)
|
|
|
|
json_out = ctx.obj.get("json", False)
|
|
if json_out:
|
|
import json
|
|
|
|
click.echo(
|
|
json.dumps(
|
|
{
|
|
"version": record.version,
|
|
"record_id": record.record_id.hex(),
|
|
"chain_index": record.chain_index,
|
|
"prev_hash": record.prev_hash.hex(),
|
|
"content_hash": record.content_hash.hex(),
|
|
"content_type": record.content_type,
|
|
"metadata": record.metadata,
|
|
"claimed_ts": record.claimed_ts,
|
|
"signer_pubkey": record.signer_pubkey.hex(),
|
|
"record_hash": compute_record_hash(record).hex(),
|
|
}
|
|
)
|
|
)
|
|
else:
|
|
click.echo(f"=== Record #{record.chain_index} ===")
|
|
click.echo(f"Record ID: {record.record_id.hex()}")
|
|
click.echo(f"Record hash: {compute_record_hash(record).hex()[:32]}...")
|
|
click.echo(f"Prev hash: {record.prev_hash.hex()[:32]}...")
|
|
click.echo(f"Content hash: {record.content_hash.hex()[:32]}...")
|
|
click.echo(f"Content type: {record.content_type}")
|
|
click.echo(f"Timestamp: {_format_us_timestamp(record.claimed_ts)}")
|
|
click.echo(f"Signer: {record.signer_pubkey.hex()[:32]}...")
|
|
if record.metadata:
|
|
click.echo(f"Metadata: {record.metadata}")
|
|
if record.entropy_witnesses:
|
|
ew = record.entropy_witnesses
|
|
click.echo(
|
|
f"Entropy: uptime={ew.sys_uptime:.1f}s "
|
|
f"entropy_avail={ew.proc_entropy} "
|
|
f"boot_id={ew.boot_id[:16]}..."
|
|
)
|
|
|
|
|
|
@chain.command()
@click.option("-n", "--count", default=20, help="Number of records to show")
@click.pass_context
def log(ctx, count):
    """Show recent chain records (newest first)."""
    from soosef.federation.chain import ChainStore
    from soosef.federation.serialization import compute_record_hash
    from soosef.paths import CHAIN_DIR

    store = ChainStore(CHAIN_DIR)
    state = store.state()

    if state is None:
        click.echo("Chain is empty.")
        return

    # Window of the last `count` records, clamped at the chain start.
    start = max(0, state.head_index - count + 1)
    records = list(store.iter_records(start, state.head_index))
    records.reverse()  # newest first

    click.echo(f"=== Last {len(records)} of {state.record_count} records ===")
    click.echo()
    for r in records:
        ts = _format_us_timestamp(r.claimed_ts)
        rhash = compute_record_hash(r).hex()[:16]
        caption = r.metadata.get("caption", "")
        label = f" — {caption}" if caption else ""
        click.echo(f" #{r.chain_index:>5} {ts} {rhash}... {r.content_type}{label}")
|
|
|
|
|
|
@chain.command()
@click.confirmation_option(prompt="Backfill existing Verisoo attestations into the chain?")
def backfill():
    """Import existing Verisoo attestations into the hash chain.

    Reads all records from the Verisoo attestation log and wraps each one
    in a chain record. Backfilled records are marked with metadata
    backfilled=true and entropy witnesses reflect migration time.
    """
    import hashlib

    from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
    from cryptography.hazmat.primitives.serialization import load_pem_private_key

    from soosef.federation.chain import ChainStore
    from soosef.paths import ATTESTATIONS_DIR, CHAIN_DIR, IDENTITY_PRIVATE_KEY

    if not IDENTITY_PRIVATE_KEY.exists():
        click.echo("Error: No identity found. Run 'soosef init' first.", err=True)
        raise SystemExit(1)

    priv_pem = IDENTITY_PRIVATE_KEY.read_bytes()
    private_key = load_pem_private_key(priv_pem, password=None)
    # Consistency with _attest_file: chain records must be signed with an
    # Ed25519 key — narrow the type before signing anything.
    assert isinstance(private_key, Ed25519PrivateKey)

    try:
        from verisoo.storage import LocalStorage

        storage = LocalStorage(base_path=ATTESTATIONS_DIR)
        stats = storage.get_stats()
    except Exception as e:
        click.echo(f"Error reading Verisoo log: {e}", err=True)
        raise SystemExit(1)

    if stats.record_count == 0:
        click.echo("No Verisoo attestations to backfill.")
        return

    store = ChainStore(CHAIN_DIR)
    existing = store.state()
    if existing and existing.record_count > 0:
        click.echo(
            f"Warning: chain already has {existing.record_count} records. "
            f"Backfill will append after index {existing.head_index}."
        )

    count = 0
    for i in range(stats.record_count):
        try:
            record = storage.get_record(i)
            # Same hashing scheme as _attest_file: canonical bytes when
            # available, str() fallback otherwise.
            record_bytes = (
                record.to_bytes() if hasattr(record, "to_bytes") else str(record).encode()
            )
            content_hash = hashlib.sha256(record_bytes).digest()

            # Preserve the original attestation time in metadata; the chain
            # record itself is timestamped at migration time.
            original_ts = int(record.timestamp.timestamp() * 1_000_000) if record.timestamp else 0
            metadata = {
                "backfilled": True,
                "original_ts": original_ts,
                "verisoo_index": i,
            }
            if hasattr(record, "attestor_fingerprint"):
                metadata["attestor"] = record.attestor_fingerprint

            store.append(
                content_hash=content_hash,
                content_type="verisoo/attestation-v1",
                private_key=private_key,
                metadata=metadata,
            )
            count += 1
        except Exception as e:
            # Per-record failures are skipped, not fatal.
            click.echo(f" Warning: skipped record {i}: {e}")

    click.echo(f"Backfilled {count} attestation(s) into the chain.")
|
|
|
|
|
|
def _format_us_timestamp(us: int) -> str:
|
|
"""Format a Unix microsecond timestamp for display."""
|
|
from datetime import UTC, datetime
|
|
|
|
dt = datetime.fromtimestamp(us / 1_000_000, tz=UTC)
|
|
return dt.strftime("%Y-%m-%d %H:%M:%S UTC")
|