12 Commits
beta ... main

| Author | SHA1 | Message | Date |
| :--- | :--- | :--- | :--- |
| thePR0M3TH3AN | 0d3d972abb | Merge pull request #712 from PR0M3TH3AN/beta (Beta) | 2025-08-02 21:18:20 -04:00 |
| thePR0M3TH3AN | 0396e99e0f | Merge pull request #681 from PR0M3TH3AN/beta (Beta) | 2025-07-26 20:26:10 -04:00 |
| thePR0M3TH3AN | 17e5d48fdf | Merge pull request #670 from PR0M3TH3AN/revert-669-codex/add-dark-mode-ui-styling (Revert "Add simple dark mode styling") | 2025-07-23 08:25:00 -04:00 |
| thePR0M3TH3AN | 8b180b8d9a | Revert "Add simple dark mode styling" | 2025-07-23 08:24:43 -04:00 |
| thePR0M3TH3AN | 08f496e1e6 | Merge pull request #669 from PR0M3TH3AN/codex/add-dark-mode-ui-styling (Add simple dark mode styling) | 2025-07-22 22:34:50 -04:00 |
| thePR0M3TH3AN | 93587a7502 | Add simple dark mode styling | 2025-07-22 22:34:21 -04:00 |
| thePR0M3TH3AN | f1c24fb2ca | Merge pull request #610 from PR0M3TH3AN/beta (Beta) | 2025-07-17 16:06:01 -04:00 |
| thePR0M3TH3AN | bda90cec03 | Merge pull request #604 from PR0M3TH3AN/beta (Beta) | 2025-07-17 11:06:24 -04:00 |
| thePR0M3TH3AN | 78cd847c25 | Merge pull request #602 from PR0M3TH3AN/beta (Beta) | 2025-07-17 10:38:45 -04:00 |
| thePR0M3TH3AN | 9e2d469743 | Merge pull request #594 from PR0M3TH3AN/beta (Beta) | 2025-07-16 20:47:54 -04:00 |
| thePR0M3TH3AN | 7e0505a729 | Merge pull request #591 from PR0M3TH3AN/beta (Beta) | 2025-07-16 19:28:14 -04:00 |
| thePR0M3TH3AN | c17bb8f8d8 | Merge pull request #586 from PR0M3TH3AN/beta (Beta) | 2025-07-16 14:32:09 -04:00 |
211 changed files with 3897 additions and 15513 deletions

View File

@@ -1,10 +0,0 @@
version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "weekly"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

View File

@@ -16,10 +16,8 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install pip-tools briefcase
pip-compile --generate-hashes --output-file=requirements.lock src/requirements.txt
git diff --exit-code requirements.lock
pip install --require-hashes -r requirements.lock
pip install -r src/requirements.txt
pip install briefcase
- name: Build with Briefcase
run: briefcase build
- name: Upload artifacts

View File

@@ -1,27 +0,0 @@
name: Dependency Audit
on:
  schedule:
    - cron: '0 0 * * 0'
  workflow_dispatch:
permissions:
  contents: read
jobs:
  audit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.11'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pip-tools pip-audit
          pip-compile --generate-hashes --output-file=requirements.lock src/requirements.txt
          git diff --exit-code requirements.lock
          pip install --require-hashes -r requirements.lock
      - name: Run pip-audit
        run: pip-audit -r requirements.lock --ignore-vuln GHSA-wj6h-64fc-37mp

View File

@@ -9,20 +9,6 @@ on:
    - cron: '0 3 * * *'
jobs:
  secret-scan:
    name: Secret Scan
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request' || github.event_name == 'schedule'
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Run gitleaks
        uses: gitleaks/gitleaks-action@v2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GITLEAKS_CONFIG: .gitleaks.toml
  build:
    strategy:
      matrix:
@@ -73,18 +59,18 @@ jobs:
uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('requirements.lock') }}
key: ${{ runner.os }}-pip-${{ hashFiles('src/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Verify lockfile and install dependencies
- name: Set up Python dependencies
id: deps
run: |
python -m pip install --upgrade pip
pip install pip-tools
pip-compile --generate-hashes --output-file=requirements.lock src/requirements.txt
git diff --exit-code requirements.lock
pip install --require-hashes -r requirements.lock
- name: Run dependency scan
run: scripts/dependency_scan.sh --ignore-vuln GHSA-wj6h-64fc-37mp
pip install -r src/requirements.txt
- name: Run pip-audit
run: |
pip install pip-audit
pip-audit -r requirements.lock --ignore-vuln GHSA-wj6h-64fc-37mp
- name: Determine stress args
shell: bash
run: |

View File

@@ -1,40 +0,0 @@
name: Tests
on:
  push:
    branches: ["**"]
  pull_request:
    branches: ["**"]
jobs:
  test:
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        python-version: ["3.10", "3.11", "3.12"]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Poetry
        run: pipx install poetry
      - name: Install dependencies
        run: poetry install
      - name: Check formatting
        run: poetry run black --check .
      - name: Run security audit
        run: |
          poetry run pip-audit || echo "::warning::pip-audit found vulnerabilities"
        shell: bash
      - name: Run tests with coverage
        run: |
          poetry run coverage run -m pytest
          poetry run coverage xml
      - name: Upload coverage report
        uses: actions/upload-artifact@v4
        with:
          name: coverage-${{ matrix.os }}-py${{ matrix.python-version }}
          path: coverage.xml

View File

@@ -1,8 +0,0 @@
title = "SeedPass gitleaks config"
[allowlist]
description = "Paths and patterns to ignore when scanning for secrets"
# Add file paths that contain test data or other non-sensitive strings
paths = []
# Add regular expressions that match false positive secrets
regexes = []

View File

@@ -2,60 +2,6 @@
This project is written in **Python**. Follow these instructions when working with the code base.
## Installation Quickstart for AI Agents
### Prerequisites
Ensure the system has the required build tools and Python headers. Examples:
```bash
# Ubuntu/Debian
sudo apt update && sudo apt install -y \
build-essential \
libffi-dev \
pkg-config \
python3.11-dev \
curl \
git
# CentOS/RHEL
sudo yum install -y gcc gcc-c++ libffi-devel pkgconfig python3-devel curl git
# macOS
brew install python@3.11 libffi pkg-config git
```
### Installation
Run the installer script to fetch the latest release:
```bash
# Stable release
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)"
# Beta branch
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ -b beta
```
### Environment Layout
- Virtual environment: `~/.seedpass/app/venv/`
- Entry point: `~/.seedpass/app/src/main.py`
### Verification
```bash
cd ~/.seedpass/app && source venv/bin/activate
cd src && python main.py --version # Expected: SeedPass v[version]
```
### Running SeedPass
```bash
cd ~/.seedpass/app && source venv/bin/activate
cd src && python main.py
```
## Running Tests
1. Set up a virtual environment and install dependencies:
@@ -63,7 +9,7 @@ cd src && python main.py
```bash
python3 -m venv venv
source venv/bin/activate
pip install --require-hashes -r requirements.lock
pip install -r src/requirements.txt
```
2. Run the test suite using **pytest**:
@@ -93,19 +39,6 @@ cd src && python main.py
Following these practices helps keep the code base consistent and secure.
## Deterministic Artifact Generation
- All generated artifacts (passwords, keys, TOTP secrets, etc.) must be fully deterministic across runs and platforms.
- Randomness is only permitted for security primitives (e.g., encryption nonces, in-memory keys) and must never influence derived artifacts.
## Legacy Index Migration
- Always provide a migration path for index archives and import/export routines.
- Support older SeedPass versions whose indexes lacked salts or password-based encryption by detecting legacy formats and upgrading them to the current schema.
- Ensure migrations unlock older account indexes and allow Nostr synchronization.
- Add regression tests covering these migrations whenever the index format or encryption changes.
## Integrating New Entry Types
SeedPass supports multiple `kind` values in its JSON entry files. When adding a

README.md (227 changes)
View File

@@ -12,14 +12,6 @@
This software was not developed by an experienced security expert and should be used with caution. There may be bugs and missing features. Each vault chunk is limited to 50 KB and SeedPass periodically publishes a new snapshot to keep accumulated deltas small. The security of the program's memory management and logs has not been evaluated and may leak sensitive information. Loss or exposure of the parent seed places all derived passwords, accounts, and other artifacts at risk.
**🚨 Breaking Change**
Recent releases derive passwords and other artifacts using a fully deterministic algorithm that behaves consistently across Python versions. This improvement means artifacts generated with earlier versions of SeedPass will not match those produced now. Regenerate any previously derived data or retain the old version if you need to reproduce older passwords or keys.
**⚠️ First Run Warning**
Use a dedicated BIP-39 seed phrase exclusively for SeedPass. Offline Mode is **ON by default**, keeping all Nostr syncing disabled until you explicitly opt in. To synchronize with Nostr, disable offline mode through the Settings menu or by running `seedpass config toggle-offline` and choosing to turn syncing on.
---
### Supported OS
@@ -36,7 +28,6 @@ SeedPass now uses the `portalocker` library for cross-platform file locking. No
- [2. Create a Virtual Environment](#2-create-a-virtual-environment)
- [3. Activate the Virtual Environment](#3-activate-the-virtual-environment)
- [4. Install Dependencies](#4-install-dependencies)
- [Optional GUI](#optional-gui)
- [Usage](#usage)
- [Running the Application](#running-the-application)
- [Managing Multiple Seeds](#managing-multiple-seeds)
@@ -45,7 +36,6 @@ SeedPass now uses the `portalocker` library for cross-platform file locking. No
- [Building a standalone executable](#building-a-standalone-executable)
- [Packaging with Briefcase](#packaging-with-briefcase)
- [Security Considerations](#security-considerations)
- [Dependency Updates](#dependency-updates)
- [Contributing](#contributing)
- [License](#license)
- [Contact](#contact)
@@ -54,7 +44,7 @@ SeedPass now uses the `portalocker` library for cross-platform file locking. No
- **Deterministic Password Generation:** Utilize BIP-85 for generating deterministic and secure passwords.
- **Encrypted Storage:** All seeds, login passwords, and sensitive index data are encrypted locally.
- **Nostr Integration:** Post and retrieve your encrypted password index to/from the Nostr network. See [Nostr Setup](docs/nostr_setup.md) for relay configuration and event details.
- **Nostr Integration:** Post and retrieve your encrypted password index to/from the Nostr network.
- **Chunked Snapshots:** Encrypted vaults are compressed and split into 50 KB chunks published as `kind 30071` events with a `kind 30070` manifest and `kind 30072` deltas. The manifest's `delta_since` field stores the UNIX timestamp of the latest delta event.
- **Automatic Checksum Generation:** The script generates and verifies a SHA-256 checksum to detect tampering.
- **Multiple Seed Profiles:** Manage separate seed profiles and switch between them seamlessly.
@@ -87,31 +77,32 @@ before fading.
SeedPass follows a layered design. The **`seedpass.core`** package exposes the
`PasswordManager` along with service classes (e.g. `VaultService` and
`EntryService`) that implement the main API used across interfaces. Both the
command line tool in **`seedpass.cli`** and the FastAPI server in
**`seedpass.api`** delegate operations to this core. The BeeWare desktop
interface (`seedpass_gui.app`) and an optional browser extension reuse these
services, with the extension communicating through the API layer.
`EntryService`) that implement the main API used across interfaces.
The command line tool in **`seedpass.cli`** is a thin adapter built with Typer
that delegates operations to this API layer.
Nostr synchronisation lives in the **`nostr`** modules. The core services call
into these modules to publish or retrieve encrypted snapshots and deltas from
configured relays.
The BeeWare desktop interface lives in **`seedpass_gui.app`** and can be
started with either `seedpass-gui` or `python -m seedpass_gui`. It reuses the
same service objects to unlock the vault, list entries and search through them.
An optional browser extension can communicate with the FastAPI server exposed by
`seedpass.api` to manage entries from within the browser.
```mermaid
graph TD
core["seedpass.core"]
cli["CLI"]
api["FastAPI server"]
core["seedpass.core"]
nostr["Nostr client"]
relays["Nostr relays"]
gui["BeeWare GUI"]
ext["Browser Extension"]
cli --> core
gui --> core
api --> core
core --> nostr
nostr --> relays
ext --> api
```
See `docs/ARCHITECTURE.md` and [Nostr Setup](docs/nostr_setup.md) for details.
See `docs/ARCHITECTURE.md` for details.
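To make the layering concrete, here is a minimal sketch of a Typer command delegating to a core service; the `EntryService.search` signature shown is an illustrative assumption, not the project's actual API.
```python
# Minimal sketch of the CLI -> core delegation described above.
# `EntryService` is named in the text, but this signature is assumed.
import typer

app = typer.Typer()


class EntryService:
    """Stand-in for the core service that owns vault access."""

    def search(self, query: str) -> list[str]:
        # A real implementation would decrypt the index and filter entries.
        return [f"entry matching {query!r}"]


@app.command()
def search(query: str) -> None:
    """Thin adapter: parse arguments, delegate, print the result."""
    for line in EntryService().search(query):
        typer.echo(line)


if __name__ == "__main__":
    app()
```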
## Prerequisites
@@ -123,42 +114,29 @@ See `docs/ARCHITECTURE.md` and [Nostr Setup](docs/nostr_setup.md) for details.
### Quick Installer
Use the automated installer to download SeedPass and its dependencies in one step.
The default `tui` mode installs only the text interface, so it runs headlessly and works well in CI or other automation. GUI backends are optional and must be explicitly requested (`--mode gui` or `--mode both` on Linux/macOS, `-IncludeGui` on Windows). If the GTK `gi` bindings are missing, the installer attempts to install the
necessary system packages using `apt`, `yum`, `pacman`, or Homebrew. When no display server is detected, GUI components are skipped automatically.
The scripts also install the correct BeeWare backend for your platform automatically.
If the GTK `gi` bindings are missing, the installer attempts to install the
necessary system packages using `apt`, `yum`, `pacman`, or Homebrew.
**Linux and macOS:**
```bash
# TUI-only/agent install (headless default)
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ --mode tui
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)"
```
*Install the beta branch:*
```bash
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ -b beta
```
Make sure the command ends right after `-b beta` with **no trailing parenthesis**.
*Install with GUI support:*
```bash
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ --mode gui
```
**Windows (PowerShell):**
```powershell
# TUI-only/agent install (default)
Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; $scriptContent = (New-Object System.Net.WebClient).DownloadString('https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.ps1'); & ([scriptblock]::create($scriptContent))
```
*Install with the optional GUI:*
```powershell
Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; $scriptContent = (New-Object System.Net.WebClient).DownloadString('https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.ps1'); & ([scriptblock]::create($scriptContent)) -IncludeGui
```
Before running the script, install **Python 3.11** or **3.12** from [python.org](https://www.python.org/downloads/windows/) and tick **"Add Python to PATH"**. You should also install the [Visual Studio Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) with the **C++ build tools** workload so dependencies compile correctly.
The Windows installer will attempt to install Git automatically if it is not already available. It also tries to install Python 3 using `winget`, `choco`, or `scoop` when Python is missing and recognizes the `py` launcher if `python` isn't on your PATH. If these tools are unavailable you'll see a link to download Python directly from <https://www.python.org/downloads/windows/>. When Python 3.13 or newer is detected without the Microsoft C++ build tools, the installer now attempts to download Python 3.12 automatically so you don't have to compile packages from source.
**Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.
#### Installer Dependency Checks
The installer verifies that core build tooling—C/C++ build tools, Rust, CMake, and the imaging/GTK libraries—are available before completing. Use `--mode gui` to install only the graphical interface or `--mode both` to install both interfaces (default: `tui`). On Linux, ensure `xclip` or `wl-clipboard` is installed for clipboard support.
#### Windows Nostr Sync Troubleshooting
When backing up or restoring from Nostr on Windows, a few issues are common:
@@ -219,7 +197,7 @@ Follow these steps to set up SeedPass on your local machine.
```bash
python -m pip install --upgrade pip
python -m pip install --require-hashes -r requirements.lock
python -m pip install -r src/requirements.txt
python -m pip install -e .
```
// 🔧 merged conflicting changes from codex/locate-command-usage-issue-in-seedpass vs beta
@@ -227,57 +205,10 @@ After reinstalling, run `which seedpass` on Linux/macOS or `where seedpass` on W
#### Linux Clipboard Support
On Linux, `pyperclip` relies on external utilities like `xclip` or `xsel`. SeedPass no longer installs these tools automatically. To enable clipboard features such as secret mode, install **xclip** manually:
On Linux, `pyperclip` relies on external utilities like `xclip` or `xsel`. SeedPass will attempt to install **xclip** automatically if neither tool is available. If the automatic installation fails, you can install it manually:
```bash
sudo apt install xclip
```
After installing `xclip`, restart SeedPass to enable clipboard support.
### Optional GUI
SeedPass ships with a GTK-based desktop interface that is still in development
and not currently functional. GUI backends are optional—run the installer with
`--mode gui` or install the Python extras below to add them. Install the packages
for your platform before adding the Python GUI dependencies.
- **Debian/Ubuntu**
```bash
sudo apt install libgirepository1.0-dev libcairo2-dev libpango1.0-dev libwebkit2gtk-4.0-dev
```
- **Fedora**
```bash
sudo dnf install gobject-introspection-devel cairo-devel pango-devel webkit2gtk4.0-devel
```
- **Arch Linux**
```bash
sudo pacman -S gobject-introspection cairo pango webkit2gtk
```
- **macOS (Homebrew)**
```bash
brew install pygobject3 gtk+3 adwaita-icon-theme librsvg webkitgtk
```
With the system requirements in place, install the Python GUI extras for your
platform:
```bash
# Linux
pip install .[gui-gtk]
# Windows
pip install .[gui-win]
# macOS
pip install .[gui-mac]
```
CLI-only users can skip these steps and install just the core package for a
lightweight, headless setup compatible with CI/automation:
```bash
pip install .
sudo apt-get install xclip
```
## Quick Start
@@ -296,10 +227,10 @@ You can then launch SeedPass and create a backup:
seedpass
# Export your index
seedpass vault export --file "~/seedpass_backup.json"
seedpass export --file "~/seedpass_backup.json"
# Later you can restore it
seedpass vault import --file "~/seedpass_backup.json"
seedpass import --file "~/seedpass_backup.json"
# Quickly find or retrieve entries
seedpass search "github"
@@ -333,30 +264,24 @@ python -m seedpass_gui
seedpass-gui
```
GUI dependencies are optional. Install them alongside SeedPass with the
extra for your platform:
Only `toga-core` and the headless `toga-dummy` backend are included by default.
The quick installer automatically installs the correct BeeWare backend so the
GUI works out of the box. If you set up SeedPass manually, install the backend
for your platform:
```bash
# Linux
pip install "seedpass[gui-gtk]"
pip install toga-gtk
# If you see build errors about "cairo" on Linux, install the cairo
# development headers using your package manager, e.g.:
sudo apt-get install libcairo2 libcairo2-dev
# Windows
pip install "seedpass[gui-win]"
pip install toga-winforms
# macOS
pip install "seedpass[gui-mac]"
# or when working from a local checkout
pip install -e ".[gui-gtk]" # Linux
pip install -e ".[gui-win]" # Windows
pip install -e ".[gui-mac]" # macOS
```
If you see build errors about "cairo" on Linux, install the cairo development
headers using your package manager, e.g.:
```bash
sudo apt-get install libcairo2 libcairo2-dev
pip install toga-cocoa
```
The GUI works with the same vault and configuration files as the CLI.
@@ -452,16 +377,6 @@ For a full list of commands see [docs/advanced_cli.md](docs/advanced_cli.md). Th
```
*(or `python src/main.py` when running directly from the repository)*
To restore a previously backed up index at launch, provide the backup path
and fingerprint:
```bash
seedpass --restore-backup /path/to/backup.json.enc --fingerprint <fp>
```
Without the flag, the startup prompt offers a **Restore from backup** option
before the vault is initialized.
2. **Follow the Prompts:**
- **Seed Profile Selection:** If you have existing seed profiles, you'll be prompted to select one or add a new one.
@@ -643,28 +558,21 @@ The default configuration uses **50,000** PBKDF2 iterations. Increase this value
### Recovery
If you previously backed up your vault to Nostr you can restore it during the
initial setup. You must provide both your 12-word master seed and the master
password that encrypted the vault; without the correct password the retrieved
data cannot be decrypted.
Alternatively, a local backup file can be loaded at startup. Launch the
application with `--restore-backup <file> --fingerprint <fp>` or choose the
**Restore from backup** option presented before the vault initializes.
initial setup:
1. Start SeedPass and choose option **4** when prompted to set up a seed.
2. Paste your BIP85 seed phrase when asked.
3. Enter the master password associated with that seed.
4. SeedPass initializes the profile and attempts to download the encrypted
vault from the configured relays.
5. A success message confirms the vault was restored. If no data is found a
2. Paste your BIP-85 seed phrase when asked.
3. SeedPass initializes the profile and attempts to download the encrypted vault
from the configured relays.
4. A success message confirms the vault was restored. If no data is found a
failure message is shown and a new empty vault is created.
## Running Tests
SeedPass includes a small suite of unit tests located under `src/tests`. **Before running `pytest`, be sure to install the test requirements.** Activate your virtual environment and run `pip install --require-hashes -r requirements.lock` to ensure all testing dependencies are available. Then run the tests with **pytest**. Use `-vv` to see INFO-level log messages from each passing test:
SeedPass includes a small suite of unit tests located under `src/tests`. **Before running `pytest`, be sure to install the test requirements.** Activate your virtual environment and run `pip install -r src/requirements.txt` to ensure all testing dependencies are available. Then run the tests with **pytest**. Use `-vv` to see INFO-level log messages from each passing test:
```bash
pip install --require-hashes -r requirements.lock
pip install -r src/requirements.txt
pytest -vv
```
@@ -728,7 +636,7 @@ Mutation testing is disabled in the GitHub workflow due to reliability issues an
1. Install all development dependencies:
```bash
pip install --require-hashes -r requirements.lock
pip install -r src/requirements.txt
```
2. When `src/runtime_requirements.txt` changes, rerun:
@@ -798,61 +706,16 @@ You can also launch the GUI directly with `seedpass gui` or `seedpass-gui`.
- **Backup Your Data:** Regularly back up your encrypted data and checksum files to prevent data loss.
- **Backup the Settings PIN:** Your settings PIN is stored in the encrypted configuration file. Keep a copy of this file or remember the PIN, as losing it will require deleting the file and reconfiguring your relays.
- **Protect Your Passwords:** Do not share your master password or seed phrases with anyone and ensure they are strong and unique.
- **Backing Up the Parent Seed:** Use the CLI `vault reveal-parent-seed` command or the `/api/v1/vault/backup-parent-seed` endpoint with explicit confirmation to create an encrypted backup. The API does not return the seed directly.
- **Revealing the Parent Seed:** The `vault reveal-parent-seed` command and `/api/v1/parent-seed` endpoint print your seed in plain text. Run them only in a secure environment.
- **No PBKDF2 Salt Needed:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
- **Checksum Verification:** Always verify the script's checksum to ensure its integrity and protect against unauthorized modifications.
- **Potential Bugs and Limitations:** Be aware that the software may contain bugs and lacks certain features. Snapshot chunks are capped at 50 KB and the client rotates snapshots after enough delta events accumulate. The security of memory management and logs has not been thoroughly evaluated and may pose risks of leaking sensitive information.
- **Best-Effort Memory Zeroization:** Sensitive data is wiped from memory when possible, but Python may retain copies of decrypted values.
- **Multiple Seeds Management:** While managing multiple seeds adds flexibility, it also increases the responsibility to secure each seed and its associated password.
- **No PBKDF2 Salt Required:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
- **Default KDF Iterations:** New profiles start with 50,000 PBKDF2 iterations. Adjust this with `seedpass config set kdf_iterations`.
- **KDF Iteration Caution:** Lowering `kdf_iterations` makes password cracking easier, while a high `backup_interval` leaves fewer recent backups.
- **Offline Mode:** When enabled, SeedPass skips all Nostr operations so your vault stays local until syncing is turned back on.
- **Quick Unlock:** Stores a hashed copy of your password in the encrypted config so you only need to enter it once per session. Avoid this on shared computers.
- **Prompt Rate Limiting:** Seed and password prompts enforce a configurable attempt limit with exponential backoff to slow brute-force attacks. Adjust or disable the limit for testing via the `--max-prompt-attempts` CLI option or the `SEEDPASS_MAX_PROMPT_ATTEMPTS` environment variable.
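As a rough illustration of an attempt limit with exponential backoff (the limit and delays below are placeholders, not SeedPass's actual defaults):
```python
# Illustrative prompt rate limiting: cap attempts, back off exponentially.
# max_attempts and base_delay are example values only.
import time
from getpass import getpass


def prompt_with_backoff(verify, max_attempts: int = 5, base_delay: float = 1.0) -> str:
    for attempt in range(max_attempts):
        candidate = getpass("Master password: ")
        if verify(candidate):
            return candidate
        time.sleep(base_delay * (2 ** attempt))  # wait 1s, 2s, 4s, ...
    raise SystemExit("Too many failed attempts")
```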
### Secure Deployment
Always deploy SeedPass behind HTTPS. Place a TLS-terminating reverse proxy such as Nginx in front of the FastAPI server or configure Uvicorn with certificate files. Example Nginx snippet:
```
server {
    listen 443 ssl;
    ssl_certificate /etc/letsencrypt/live/example.com/fullchain.pem;
    ssl_certificate_key /etc/letsencrypt/live/example.com/privkey.pem;
    location / {
        proxy_pass http://127.0.0.1:8000;
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }
}
```
For local testing, Uvicorn can run with TLS directly:
```
uvicorn seedpass.api:app --ssl-certfile=cert.pem --ssl-keyfile=key.pem
```
## Dependency Updates
Automated dependency updates are handled by [Dependabot](https://docs.github.com/en/code-security/dependabot).
Every week, Dependabot checks Python packages and GitHub Actions used by this repository and opens pull requests when updates are available.
To review and merge these updates:
1. Review the changelog and release notes in the Dependabot pull request.
2. Run the test suite locally:
```bash
python3 -m venv venv
source venv/bin/activate
pip install --require-hashes -r requirements.lock
pytest
```
3. Merge the pull request once all checks pass.
A scheduled **Dependency Audit** workflow also runs [`pip-audit`](https://github.com/pypa/pip-audit) weekly to detect vulnerable packages. Address any reported issues promptly to keep dependencies secure.
## Contributing

View File

@@ -1,44 +0,0 @@
# SeedPass Specification
## Key Hierarchy
SeedPass derives a hierarchy of keys from a single BIP-39 parent seed using HKDF:
- **Master Key** – `HKDF(seed, "seedpass:v1:master")`
- **KEY_STORAGE** – used to encrypt vault data.
- **KEY_INDEX** – protects the metadata index.
- **KEY_PW_DERIVE** – deterministic password generation.
- **KEY_TOTP_DET** – deterministic TOTP secrets.
Each context string keeps derived keys domain separated.
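A minimal sketch of that domain separation using HKDF from the `cryptography` package; the SHA-256 choice and every label other than `seedpass:v1:master` are assumptions for illustration.
```python
# Sketch of domain-separated key derivation; only "seedpass:v1:master"
# comes from the spec, the hash choice and other labels are illustrative.
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF


def derive(key_material: bytes, info: bytes, length: int = 32) -> bytes:
    return HKDF(
        algorithm=hashes.SHA256(), length=length, salt=None, info=info
    ).derive(key_material)


seed = b"<bip39-parent-seed-bytes>"
master = derive(seed, b"seedpass:v1:master")
key_storage = derive(master, b"seedpass:v1:storage")  # placeholder label
key_index = derive(master, b"seedpass:v1:index")      # placeholder label
```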
## KDF Parameters
Passwords are protected with **PBKDF2-HMAC-SHA256**. The default work factor is
**50,000 iterations** but may be adjusted via the settings slider. The config
stores a `KdfConfig` structure with the chosen iteration count, algorithm name,
and the current spec version (`CURRENT_KDF_VERSION = 1`). Argon2 is available
with a default `time_cost` of 2 when selected.
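For reference, a PBKDF2-HMAC-SHA256 call with that default work factor might look like the sketch below; the salt argument is a placeholder, since SeedPass builds its inputs differently (see the README's note on salts).
```python
# PBKDF2-HMAC-SHA256 with the default work factor described above.
# The salt shown is a placeholder, not how SeedPass derives its inputs.
import hashlib

DEFAULT_ITERATIONS = 50_000


def derive_key(password: str, salt: bytes, iterations: int = DEFAULT_ITERATIONS) -> bytes:
    return hashlib.pbkdf2_hmac("sha256", password.encode("utf-8"), salt, iterations, dklen=32)
```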
## Message Formats
SeedPass synchronizes profiles over Nostr using three event kinds:
- **Manifest (`30070`)** – high-level snapshot description and current version.
- **Snapshot Chunk (`30071`)** – compressed, encrypted portions of the vault.
- **Delta (`30072`)** – incremental changes since the last snapshot.
Events encode JSON and include tags for checksums, fingerprints, and timestamps.
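As a rough sketch of the shape of such a payload (only `delta_since` is named by the spec; the other field names are illustrative):
```python
# Illustrative manifest payload for a kind 30070 event; apart from
# `delta_since`, the field names here are assumptions.
manifest = {
    "version": 1,
    "chunks": ["<chunk-event-id-0>", "<chunk-event-id-1>"],  # kind 30071 events
    "checksum": "<sha256-of-compressed-vault>",
    "delta_since": 1722600000,  # UNIX timestamp of the latest kind 30072 delta
}
```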
## Versioning
Configuration and KDF schemas are versioned so clients can migrate older
profiles. Nostr events carry a version field in the manifest, and the software
follows semantic versioning for releases.
## Memory Protection
SeedPass encrypts sensitive values in memory and attempts to wipe them when no
longer needed. This zeroization is best-effort only; Python's memory management
may retain copies of decrypted data. Critical cryptographic operations may move
to a Rust/WASM module in the future to provide stronger guarantees.

View File

@@ -78,7 +78,7 @@ Manage the entire vault for a profile.
### Nostr Commands
Interact with the Nostr network for backup and synchronization. Offline mode is enabled by default, so disable it with `seedpass config toggle-offline` before using these commands.
Interact with the Nostr network for backup and synchronization.
| Action | Command | Examples |
| :--- | :--- | :--- |
@@ -127,7 +127,7 @@ Run or stop the local HTTP API.
| Action | Command | Examples |
| :--- | :--- | :--- |
| Start the API | `api start` | `seedpass api start --host 0.0.0.0 --port 8000` |
| Stop the API | `api stop --token TOKEN` | `seedpass api stop --token <token>` |
| Stop the API | `api stop` | `seedpass api stop` |
---
@@ -214,7 +214,7 @@ Set the `SEEDPASS_CORS_ORIGINS` environment variable to a comma-separated list
SEEDPASS_CORS_ORIGINS=http://localhost:3000 seedpass api start
```
Shut down the server with `seedpass api stop --token <token>`.
Shut down the server with `seedpass api stop`.
---

View File

@@ -7,19 +7,19 @@ This guide covers how to start the SeedPass API, authenticate requests, and inte
## Starting the API
Run `seedpass api start` from your terminal. The command prints a short-lived JWT token used for authentication:
Run `seedpass api start` from your terminal. The command prints a one-time token used for authentication:
```bash
$ seedpass api start
API token: eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...
API token: abcdef1234567890
```
Keep this token secret and avoid logging it. Tokens expire after a few minutes and every request must include one in the `Authorization` header using the `Bearer` scheme.
Keep this token secret. Every request must include it in the `Authorization` header using the `Bearer` scheme.
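For example, the same header can be sent from Python (using the `requests` package and the local address used in the examples below):
```python
# Send the Bearer token with each request; adjust host/port as needed.
import requests

token = "abcdef1234567890"  # value printed by `seedpass api start`
resp = requests.get(
    "http://127.0.0.1:8000/api/v1/entry",
    params={"query": "email"},
    headers={"Authorization": f"Bearer {token}"},
    timeout=10,
)
print(resp.json())
```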
## Endpoints
- `GET /api/v1/entry?query=<text>` – Search entries matching a query.
- `GET /api/v1/entry/{id}` – Retrieve a single entry by its index. Requires an `X-SeedPass-Password` header.
- `GET /api/v1/entry/{id}` – Retrieve a single entry by its index.
- `POST /api/v1/entry` – Create a new entry of any supported type.
- `PUT /api/v1/entry/{id}` – Modify an existing entry.
- `PUT /api/v1/config/{key}` – Update a configuration value.
@@ -31,17 +31,18 @@ Keep this token secret and avoid logging it. Tokens expire after a few minutes a
- `POST /api/v1/fingerprint` – Add a new seed fingerprint.
- `DELETE /api/v1/fingerprint/{fp}` – Remove a fingerprint.
- `POST /api/v1/fingerprint/select` – Switch the active fingerprint.
- `GET /api/v1/totp/export` – Export all TOTP entries as JSON. Requires an `X-SeedPass-Password` header.
- `GET /api/v1/totp` – Return current TOTP codes and remaining time. Requires an `X-SeedPass-Password` header.
- `GET /api/v1/totp/export` – Export all TOTP entries as JSON.
- `GET /api/v1/totp` – Return current TOTP codes and remaining time.
- `GET /api/v1/stats` – Return statistics about the active seed profile.
- `GET /api/v1/notifications` – Retrieve and clear queued notifications. Messages appear in the persistent notification box but remain queued until fetched.
- `GET /api/v1/parent-seed` – Reveal the parent seed or save it with `?file=`.
- `GET /api/v1/nostr/pubkey` – Fetch the Nostr public key for the active seed.
- `POST /api/v1/checksum/verify` – Verify the checksum of the running script.
- `POST /api/v1/checksum/update` – Update the stored script checksum.
- `POST /api/v1/change-password` – Change the master password for the active profile.
- `POST /api/v1/vault/import` – Import a vault backup from a file or path.
- `POST /api/v1/vault/export` – Export the vault and download the encrypted file. Requires an additional `X-SeedPass-Password` header.
- `POST /api/v1/vault/backup-parent-seed` – Save an encrypted backup of the parent seed. Requires a `confirm` flag in the request body and an `X-SeedPass-Password` header.
- `POST /api/v1/vault/export` – Export the vault and download the encrypted file.
- `POST /api/v1/vault/backup-parent-seed` – Save an encrypted backup of the parent seed.
- `POST /api/v1/vault/lock` – Lock the vault and clear sensitive data from memory.
- `GET /api/v1/relays` – List configured Nostr relays.
- `POST /api/v1/relays` – Add a relay URL.
@@ -49,30 +50,7 @@ Keep this token secret and avoid logging it. Tokens expire after a few minutes a
- `POST /api/v1/relays/reset` – Reset the relay list to defaults.
- `POST /api/v1/shutdown` – Stop the server gracefully.
## Secure Deployment
Always run the API behind HTTPS. Use a reverse proxy such as Nginx or Caddy to terminate TLS and forward requests to SeedPass. Example Nginx configuration:
```
server {
    listen 443 ssl;
    ssl_certificate /etc/letsencrypt/live/example.com/fullchain.pem;
    ssl_certificate_key /etc/letsencrypt/live/example.com/privkey.pem;
    location / {
        proxy_pass http://127.0.0.1:8000;
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }
}
```
For local testing, Uvicorn can serve TLS directly:
```
uvicorn seedpass.api:app --ssl-certfile=cert.pem --ssl-keyfile=key.pem
```
**Security Warning:** Accessing `/api/v1/parent-seed` exposes your master seed in plain text. Use it only from a trusted environment.
## Example Requests
@@ -80,7 +58,7 @@ Send requests with the token in the header:
```bash
curl -H "Authorization: Bearer <token>" \
"https://127.0.0.1:8000/api/v1/entry?query=email"
"http://127.0.0.1:8000/api/v1/entry?query=email"
```
### Creating an Entry
@@ -171,9 +149,8 @@ curl -X POST http://127.0.0.1:8000/api/v1/fingerprint/select \
Download an encrypted vault backup via `POST /api/v1/vault/export`:
```bash
curl -X POST https://127.0.0.1:8000/api/v1/vault/export \
curl -X POST http://127.0.0.1:8000/api/v1/vault/export \
-H "Authorization: Bearer <token>" \
-H "X-SeedPass-Password: <master-password>" \
-o backup.json
```
@@ -203,9 +180,8 @@ Trigger an encrypted seed backup with `/api/v1/vault/backup-parent-seed`:
```bash
curl -X POST http://127.0.0.1:8000/api/v1/vault/backup-parent-seed \
-H "Authorization: Bearer <token>" \
-H "X-SeedPass-Password: <master password>" \
-H "Content-Type: application/json" \
-d '{"path": "seed_backup.enc", "confirm": true}'
-d '{"path": "seed_backup.enc"}'
```
### Retrieving Vault Statistics

View File

@@ -3,8 +3,6 @@
SeedPass stores its password index in an encrypted JSON file. Each index contains
a `schema_version` field so the application knows how to upgrade older files.
> **Note:** Recent releases derive passwords and other artifacts using a new deterministic algorithm that works consistently across Python versions. Artifacts produced with older versions will not match outputs from this release and must be regenerated.
## How migrations work
When the vault loads the index, `Vault.load_index()` checks the version and

View File

@@ -14,17 +14,10 @@ python -m seedpass_gui
seedpass-gui
```
GUI dependencies are optional. Install them alongside SeedPass with:
```bash
pip install "seedpass[gui]"
# or when working from a local checkout
pip install -e .[gui]
```
After installing the optional GUI extras, add the BeeWare backend for your
platform:
Only `toga-core` and the headless `toga-dummy` backend ship with the project.
The installation scripts automatically install the correct BeeWare backend so
the GUI works out of the box. If you set up SeedPass manually, install the
backend for your platform:
```bash
# Linux

View File

@@ -10,10 +10,6 @@
This software was not developed by an experienced security expert and should be used with caution. There may be bugs and missing features. Each vault chunk is limited to 50KB and SeedPass periodically publishes a new snapshot to keep accumulated deltas small. The security of the program's memory management and logs has not been evaluated and may leak sensitive information. Loss or exposure of the parent seed places all derived passwords, accounts, and other artifacts at risk.
**🚨 Breaking Change**
Recent releases derive passwords and other artifacts using a fully deterministic algorithm that behaves consistently across Python versions. This improvement means artifacts generated with earlier versions of SeedPass will not match those produced now. Regenerate any previously derived data or retain the old version if you need to reproduce older passwords or keys.
---
### Supported OS
@@ -50,7 +46,6 @@ maintainable while enabling a consistent experience on multiple platforms.
- [Running the Application](#running-the-application)
- [Managing Multiple Seeds](#managing-multiple-seeds)
- [Additional Entry Types](#additional-entry-types)
- [Recovery](#recovery)
- [Security Considerations](#security-considerations)
- [Contributing](#contributing)
- [License](#license)
@@ -83,7 +78,7 @@ maintainable while enabling a consistent experience on multiple platforms.
- **Change Master Password:** Rotate your encryption password at any time.
- **Checksum Verification Utilities:** Verify or regenerate the script checksum.
- **Relay Management:** List, add, remove or reset configured Nostr relays.
- **Offline Mode (default):** SeedPass runs without network sync until you explicitly enable it.
- **Offline Mode:** Disable network sync to work entirely locally.
## Prerequisites
@@ -120,11 +115,6 @@ isn't on your PATH. If these tools are unavailable you'll see a link to download
the installer now attempts to download Python 3.12 automatically so you don't have to compile packages from source.
**Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.
#### Installer Dependency Checks
The installer verifies that core build tooling—C/C++ build tools, Rust, CMake, and the imaging/GTK libraries—are available before completing. Pass `--no-gui` to skip installing GUI packages. On Linux, ensure `xclip` or `wl-clipboard` is installed for clipboard support.
### Uninstall
Run the matching uninstaller if you need to remove a previous installation or clean up an old `seedpass` command:
@@ -195,22 +185,20 @@ When upgrading pip, use `python -m pip` inside the virtual environment so that p
```bash
python -m pip install --upgrade pip
python -m pip install --require-hashes -r requirements.lock
python -m pip install -r src/requirements.txt
python -m pip install -e .
```
#### Linux Clipboard Support
On Linux, `pyperclip` relies on external utilities like `xclip` or `xsel`.
SeedPass does not install these tools automatically. To use clipboard features
such as secret mode, install **xclip** manually:
SeedPass will attempt to install **xclip** automatically if neither tool is
available. If the automatic installation fails, you can install it manually:
```bash
sudo apt install xclip
sudo apt-get install xclip
```
After installing `xclip`, restart SeedPass to enable clipboard support.
## Quick Start
After installing dependencies, activate your virtual environment and install
@@ -415,22 +403,6 @@ SeedPass allows you to manage multiple seed profiles (previously referred to as
**Note:** The term "seed profile" is used to represent different sets of seeds you can manage within SeedPass. This provides an intuitive way to handle multiple identities or sets of passwords.
### Recovery
If you previously backed up your vault to Nostr you can restore it during the
initial setup. You must provide both your 12-word master seed and the master
password that encrypted the vault; without the correct password the retrieved
data cannot be decrypted.
1. Start SeedPass and choose option **4** when prompted to set up a seed.
2. Paste your BIP85 seed phrase when asked.
3. Enter the master password associated with that seed.
4. SeedPass initializes the profile and attempts to download the encrypted
vault from the configured relays.
5. A success message confirms the vault was restored. If no data is found a
failure message is shown and a new empty vault is created.
### Configuration File and Settings
SeedPass keeps per-profile settings in an encrypted file named `seedpass_config.json.enc` inside each profile directory under `~/.seedpass/`. This file stores your chosen Nostr relays and the optional settings PIN. New profiles start with the following default relays:
@@ -472,17 +444,17 @@ Back in the Settings menu you can:
whether both the encrypted database and the script itself pass checksum
validation.
* Choose `14` to toggle Secret Mode and set the clipboard clear delay.
* Select `15` to toggle Offline Mode. SeedPass starts offline; disable it here to enable Nostr syncing.
* Select `15` to toggle Offline Mode and work locally without contacting Nostr.
* Choose `16` to toggle Quick Unlock so subsequent actions skip the password prompt. Startup delay is unchanged.
* Select `17` to return to the main menu.
## Running Tests
SeedPass includes a small suite of unit tests located under `src/tests`. **Before running `pytest`, be sure to install the test requirements.** Activate your virtual environment and run `pip install --require-hashes -r requirements.lock` to ensure all testing dependencies are available. Then run the tests with **pytest**. Use `-vv` to see INFO-level log messages from each passing test:
SeedPass includes a small suite of unit tests located under `src/tests`. **Before running `pytest`, be sure to install the test requirements.** Activate your virtual environment and run `pip install -r src/requirements.txt` to ensure all testing dependencies are available. Then run the tests with **pytest**. Use `-vv` to see INFO-level log messages from each passing test:
```bash
pip install --require-hashes -r requirements.lock
pip install -r src/requirements.txt
pytest -vv
```
@@ -559,14 +531,14 @@ Mutation testing is disabled in the GitHub workflow due to reliability issues an
- **Backup Your Data:** Regularly back up your encrypted data and checksum files to prevent data loss.
- **Backup the Settings PIN:** Your settings PIN is stored in the encrypted configuration file. Keep a copy of this file or remember the PIN, as losing it will require deleting the file and reconfiguring your relays.
- **Protect Your Passwords:** Do not share your master password or seed phrases with anyone and ensure they are strong and unique.
- **Backing Up the Parent Seed:** Use the CLI `vault reveal-parent-seed` command or the `/api/v1/vault/backup-parent-seed` endpoint with explicit confirmation to create an encrypted backup. The API does not return the seed directly.
- **Revealing the Parent Seed:** The `vault reveal-parent-seed` command and `/api/v1/parent-seed` endpoint print your seed in plain text. Run them only in a secure environment.
- **No PBKDF2 Salt Needed:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
- **Checksum Verification:** Always verify the script's checksum to ensure its integrity and protect against unauthorized modifications.
- **Potential Bugs and Limitations:** Be aware that the software may contain bugs and lacks certain features. Snapshot chunks are capped at 50KB and the client rotates snapshots after enough delta events accumulate. The security of memory management and logs has not been thoroughly evaluated and may pose risks of leaking sensitive information.
- **Multiple Seeds Management:** While managing multiple seeds adds flexibility, it also increases the responsibility to secure each seed and its associated password.
- **No PBKDF2 Salt Required:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
- **Default KDF Iterations:** New profiles start with 50,000 PBKDF2 iterations. Use `seedpass config set kdf_iterations` to change this.
- **Offline Mode (default):** Nostr sync is disabled until you explicitly enable it via the Settings menu or `seedpass config toggle-offline`.
- **Offline Mode:** Disable Nostr sync to keep all operations local until you re-enable networking.
- **Quick Unlock:** Store a hashed copy of your password so future actions skip the prompt. Startup delay no longer changes. Use with caution on shared systems.
## Contributing

View File

@@ -1,33 +0,0 @@
# Nostr Setup
This guide explains how SeedPass uses the Nostr protocol for encrypted vault backups and how to configure relays. SeedPass starts in offline mode, so you must explicitly disable it before any network synchronization. Run `seedpass config toggle-offline` or use the Settings menu to enable online syncing.
## Relay Configuration
SeedPass communicates with the Nostr network through a list of relays. You can manage these relays from the CLI:
```bash
seedpass nostr list-relays # show configured relays
seedpass nostr add-relay <url> # add a relay URL
seedpass nostr remove-relay <n> # remove relay by index
```
At least one relay is required for publishing and retrieving backups. Choose relays you trust to remain online and avoid those that charge high fees or aggressively rate-limit connections.
## Manifest and Delta Events
Backups are published as parameterised replaceable events:
- **Kind 30070 Manifest:** describes the snapshot and lists chunk IDs. The optional `delta_since` field stores the UNIX timestamp of the latest delta event.
- **Kind 30071 Snapshot Chunk:** each 50 KB fragment of the compressed, encrypted vault.
- **Kind 30072 Delta:** captures changes since the last snapshot.
When restoring, SeedPass downloads the most recent manifest and applies any newer delta events.
## Troubleshooting
- **No events found:** ensure the relays are reachable and that the correct fingerprint is selected.
- **Connection failures:** some relays only support WebSocket over TLS; verify you are using `wss://` URLs where required.
- **Stale data:** if deltas accumulate without a fresh snapshot, run `seedpass nostr sync` to publish an updated snapshot.
Increasing log verbosity with `--verbose` can also help diagnose relay or network issues.

View File

@@ -1,38 +0,0 @@
# Packaging SeedPass
This guide describes how to build platform-native packages for SeedPass using [BeeWare Briefcase](https://briefcase.readthedocs.io/).
## Prerequisites
* Python 3.12 with development headers (`python3-dev` on Debian/Ubuntu).
* Briefcase installed in your virtual environment:
```bash
pip install briefcase
```
## Linux
The helper script in `packaging/build-linux.sh` performs `briefcase create`, `build`, and `package` for the current project.
```bash
./packaging/build-linux.sh
```
Briefcase outputs its build artifacts in `build/seedpass-gui/ubuntu/noble/`. These files can be bundled in container formats such as Flatpak or Snap. Example manifests are included:
* `packaging/flatpak/seedpass.yml` targets the `org.gnome.Platform` runtime and copies the Briefcase build into the Flatpak bundle.
* `packaging/snapcraft.yaml` stages the Briefcase build and lists GTK libraries in `stage-packages` so the Snap includes its GUI dependencies.
## macOS and Windows
Scripts are provided to document the commands expected on each platform. They must be run on their respective operating systems:
* `packaging/build-macos.sh`
* `packaging/build-windows.ps1`
Each script runs Briefcase's `create`, `build`, and `package` steps with `--no-input`.
## Reproducible Releases
The `packaging/` directory contains the scripts and manifests needed to regenerate desktop packages. Invoke the appropriate script on the target OS, then use the supplied Flatpak or Snap manifest to bundle additional dependencies for Linux.

View File

@@ -1,17 +0,0 @@
# Secret Scanning
SeedPass uses [Gitleaks](https://github.com/gitleaks/gitleaks) to scan the repository for accidentally committed secrets. The scan runs automatically for pull requests and on a nightly schedule. Any findings will cause the build to fail.
## Suppressing False Positives
If a file or string triggers the scanner but does not contain a real secret, add it to the allowlist in `.gitleaks.toml`.
```toml
[allowlist]
# Ignore specific files
paths = ["path/to/file.txt"]
# Ignore strings that match a regular expression
regexes = ["""dummy_api_key"""]
```
Commit the updated `.gitleaks.toml` to stop future alerts for the allowed items.

View File

@@ -1,30 +0,0 @@
# Security Testing and Calibration
This project includes fuzz tests and a calibration routine to tune Argon2 parameters for your hardware.
## Running Fuzz Tests
The fuzz tests exercise encryption and decryption with random data using [Hypothesis](https://hypothesis.readthedocs.io/).
Activate the project's virtual environment and run:
```bash
pytest src/tests/test_encryption_fuzz.py
```
Running the entire test suite will also execute these fuzz tests.
## Calibrating Argon2 Time Cost
Argon2 performance varies by device. To calibrate the `time_cost` parameter, run the helper function:
```bash
python - <<'PY'
from seedpass.core.config_manager import ConfigManager
from utils.key_derivation import calibrate_argon2_time_cost
# assuming ``cfg`` is a ConfigManager for your profile
calibrate_argon2_time_cost(cfg)
PY
```
The selected `time_cost` is stored in the profile's configuration and used for subsequent key derivations.
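A sketch of how a calibrated `time_cost` might feed into an Argon2 derivation with `argon2-cffi`; the salt, `memory_cost`, and `parallelism` values below are illustrative, not SeedPass's settings.
```python
# Using a calibrated time_cost with argon2-cffi; all other parameters
# here are example values.
from argon2.low_level import Type, hash_secret_raw

calibrated_time_cost = 2  # e.g. the value chosen by calibrate_argon2_time_cost

key = hash_secret_raw(
    secret=b"master password",
    salt=b"0123456789abcdef",  # placeholder 16-byte salt
    time_cost=calibrated_time_cost,
    memory_cost=64 * 1024,  # KiB
    parallelism=2,
    hash_len=32,
    type=Type.ID,
)
```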

View File

@@ -202,8 +202,6 @@ flowchart TD
<p>SeedPass allows you to manage multiple seed profiles (fingerprints). You can switch between different seeds to compartmentalize your passwords.</p>
<h3 class="subsection-title">Nostr Relay Integration</h3>
<p>SeedPass publishes your encrypted vault to Nostr in 50&#8201;KB chunks using parameterised replaceable events. A manifest describes each snapshot while deltas record updates. When too many deltas accumulate, a new snapshot is rotated in automatically.</p>
<h3 class="subsection-title">Recovery from Nostr</h3>
<p>Restoring a vault on a new device requires both your 12&#8201;word master seed and the master password that encrypted the vault. Without the correct password the downloaded archive cannot be decrypted.</p>
<h3 class="subsection-title">Checksum Verification</h3>
<p>Built-in checksum verification ensures your SeedPass installation hasn't been tampered with.</p>
<h3 class="subsection-title">Interactive TUI</h3>

View File

@@ -1,5 +0,0 @@
#!/bin/bash
set -e
briefcase create linux --no-input
briefcase build linux --no-input
briefcase package linux --no-input

View File

@@ -1,5 +0,0 @@
#!/bin/bash
set -e
briefcase create macos --no-input
briefcase build macos --no-input
briefcase package macos --no-input

View File

@@ -1,3 +0,0 @@
briefcase create windows --no-input
briefcase build windows --no-input
briefcase package windows --no-input

View File

@@ -1,18 +0,0 @@
app-id: io.seedpass.SeedPass
runtime: org.gnome.Platform
runtime-version: '46'
sdk: org.gnome.Sdk
command: seedpass-gui
modules:
  - name: seedpass
    buildsystem: simple
    build-commands:
      - mkdir -p /app/bin
      - cp -r ../../build/seedpass-gui/ubuntu/noble/* /app/bin/
    sources:
      - type: dir
        path: ../../
finish-args:
  - --share=network
  - --socket=fallback-x11
  - --socket=wayland

View File

@@ -1,22 +0,0 @@
name: seedpass
base: core22
version: '0.1.0'
summary: Deterministic password manager
description: |
  SeedPass deterministically generates passwords using BIP-39 seeds.
grade: devel
confinement: strict
apps:
  seedpass-gui:
    command: bin/seedpass-gui
    plugs:
      - network
      - x11
parts:
  seedpass:
    plugin: dump
    source: build/seedpass-gui/ubuntu/noble/app
    stage-packages:
      - libgtk-3-0
      - libglib2.0-0
      - libgdk-pixbuf2.0-0

poetry.lock (generated, 3629 changes)

File diff suppressed because it is too large.

View File

@@ -1,63 +1,12 @@
[tool.poetry]
[project]
name = "seedpass"
version = "0.1.0"
description = "Deterministic password manager with a BeeWare GUI"
authors = []
[tool.poetry.dependencies]
python = ">=3.10,<3.13"
colorama = ">=0.4.6"
termcolor = ">=1.1.0"
cryptography = ">=40.0.2"
bip-utils = ">=2.5.0"
bech32 = "1.2.0"
coincurve = ">=18.0.0"
mnemonic = "*"
aiohttp = ">=3.12.15"
bcrypt = "*"
portalocker = ">=2.8"
nostr-sdk = ">=0.43"
websocket-client = "1.7.0"
websockets = ">=15.0.0"
tomli = "*"
pgpy = "0.6.0"
pyotp = ">=2.8.0"
pyperclip = "*"
qrcode = ">=8.2"
typer = ">=0.12.3"
fastapi = ">=0.116.0"
uvicorn = ">=0.35.0"
httpx = ">=0.28.1"
requests = ">=2.32"
python-multipart = ">=0.0.20"
orjson = "*"
argon2-cffi = "*"
PyJWT = ">=2.8.0"
slowapi = "^0.1.9"
toga-core = { version = ">=0.5.2", optional = true }
pillow = { version = "*", optional = true }
toga-gtk = { version = ">=0.5.2", optional = true }
toga-winforms = { version = ">=0.5.2", optional = true }
toga-cocoa = { version = ">=0.5.2", optional = true }
[build-system]
requires = ["setuptools>=61", "wheel"]
build-backend = "setuptools.build_meta"
[tool.poetry.extras]
gui = ["toga-core", "pillow"]
gui-gtk = ["toga-gtk"]
gui-win = ["toga-winforms"]
gui-mac = ["toga-cocoa"]
[tool.poetry.group.dev.dependencies]
pytest = "^8.2"
coverage = "^7.5"
black = "^24.3"
pip-audit = "^2.7"
pytest-xdist = "^3.5"
hypothesis = "^6.98"
freezegun = "^1.5"
toga-dummy = ">=0.5.2"
Pillow = "^10.4"
[tool.poetry.scripts]
[project.scripts]
seedpass = "seedpass.cli:app"
seedpass-gui = "seedpass_gui.app:main"
@@ -66,15 +15,10 @@ python_version = "3.11"
strict = true
mypy_path = "src"
[tool.briefcase]
project_name = "SeedPass"
bundle = "io.seedpass"
version = "0.1.0"
[tool.briefcase.app.seedpass-gui]
formal-name = "SeedPass"
description = "Deterministic password manager with a BeeWare GUI"
sources = ["src/seedpass_gui"]
sources = ["src"]
requires = [
"toga-core>=0.5.2",
"colorama>=0.4.6",
@@ -84,7 +28,7 @@ requires = [
"bech32==1.2.0",
"coincurve>=18.0.0",
"mnemonic",
"aiohttp>=3.12.15",
"aiohttp>=3.12.14",
"bcrypt",
"portalocker>=2.8",
"nostr-sdk>=0.43",
@@ -100,14 +44,8 @@ requires = [
"uvicorn>=0.35.0",
"httpx>=0.28.1",
"requests>=2.32",
"python-multipart>=0.0.20",
"python-multipart",
"orjson",
"argon2-cffi",
]
icon = "logo/png/SeedPass-Logo-24.png"
license = { file = "LICENSE" }
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

View File

@@ -3,7 +3,7 @@ addopts = -n auto
log_cli = true
log_cli_level = WARNING
log_level = WARNING
testpaths = src/tests tests
testpaths = src/tests
markers =
network: tests that require network connectivity
stress: long running stress tests

File diff suppressed because it is too large.

View File

@@ -1,9 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Run pip-audit against the pinned requirements
if ! command -v pip-audit >/dev/null 2>&1; then
    python -m pip install --quiet pip-audit
fi
pip-audit -r requirements.lock "$@"

View File

@@ -43,7 +43,6 @@ from seedpass.core.vault import Vault
from seedpass.core.config_manager import ConfigManager
from seedpass.core.backup import BackupManager
from seedpass.core.entry_management import EntryManager
from seedpass.core.state_manager import StateManager
from nostr.client import NostrClient
from utils.fingerprint import generate_fingerprint
from utils.fingerprint_manager import FingerprintManager
@@ -80,7 +79,7 @@ def initialize_profile(
profile_dir = APP_DIR / fingerprint
profile_dir.mkdir(parents=True, exist_ok=True)
seed_key = derive_key_from_password(DEFAULT_PASSWORD, fingerprint)
seed_key = derive_key_from_password(DEFAULT_PASSWORD)
seed_mgr = EncryptionManager(seed_key, profile_dir)
seed_file = profile_dir / "parent_seed.enc"
clear_path = profile_dir / "seed_phrase.txt"
@@ -196,13 +195,11 @@ def main() -> None:
encrypted = entry_mgr.vault.get_encrypted_index()
if encrypted:
idx = StateManager(dir_path).state.get("nostr_account_idx", 0)
client = NostrClient(
entry_mgr.vault.encryption_manager,
fingerprint or dir_path.name,
parent_seed=seed,
config_manager=cfg_mgr,
account_index=idx,
)
asyncio.run(client.publish_snapshot(encrypted))
print("[+] Data synchronized to Nostr.")

View File

@@ -2,12 +2,10 @@
# SeedPass Universal Installer for Windows
#
# Supports installing from a specific branch using the -Branch parameter.
# Use -IncludeGui to install the optional BeeWare GUI backend.
# Example: .\install.ps1 -Branch beta -IncludeGui
# Example: .\install.ps1 -Branch beta
param(
[string]$Branch = "main", # The git branch to install from
[switch]$IncludeGui # Install BeeWare GUI components
[string]$Branch = "main" # The git branch to install from
)
# --- Configuration ---
@@ -251,30 +249,20 @@ if ($LASTEXITCODE -ne 0) {
Write-Error "Failed to upgrade pip"
}
& "$VenvDir\Scripts\python.exe" -m pip install --require-hashes -r "requirements.lock"
& "$VenvDir\Scripts\python.exe" -m pip install -r "src\requirements.txt"
if ($LASTEXITCODE -ne 0) {
Write-Warning "Failed to install Python dependencies. If errors mention C++, install Microsoft C++ Build Tools: https://visualstudio.microsoft.com/visual-cpp-build-tools/"
Write-Error "Dependency installation failed."
}
if ($IncludeGui) {
& "$VenvDir\Scripts\python.exe" -m pip install -e .[gui]
} else {
& "$VenvDir\Scripts\python.exe" -m pip install -e .
}
& "$VenvDir\Scripts\python.exe" -m pip install -e .
if ($LASTEXITCODE -ne 0) {
Write-Error "Failed to install SeedPass package"
}
if ($IncludeGui) {
Write-Info "Installing BeeWare GUI backend..."
try {
& "$VenvDir\Scripts\python.exe" -m pip install toga-winforms
if ($LASTEXITCODE -ne 0) { throw "toga-winforms installation failed" }
} catch {
Write-Warning "Failed to install GUI backend. Install Microsoft C++ Build Tools from https://visualstudio.microsoft.com/visual-cpp-build-tools/ and rerun the installer."
}
}
Write-Info "Installing BeeWare GUI backend..."
& "$VenvDir\Scripts\python.exe" -m pip install toga-winforms
if ($LASTEXITCODE -ne 0) { Write-Warning "Failed to install GUI backend" }
# 5. Create launcher script
Write-Info "Creating launcher script..."

View File

@@ -5,9 +5,7 @@
# Supports installing from a specific branch using the -b or --branch flag.
# Example: ./install.sh -b beta
set -euo pipefail
IFS=$'\n\t'
trap 'echo "[ERROR] Line $LINENO failed"; exit 1' ERR
set -e
# --- Configuration ---
REPO_URL="https://github.com/PR0M3TH3AN/SeedPass.git"
@@ -17,52 +15,41 @@ VENV_DIR="$INSTALL_DIR/venv"
LAUNCHER_DIR="$HOME/.local/bin"
LAUNCHER_PATH="$LAUNCHER_DIR/seedpass"
BRANCH="main" # Default branch
MODE="tui"
INSTALL_GUI=false
# --- Helper Functions ---
print_info() { echo -e "\033[1;34m[INFO]\033[0m" "$1"; }
print_success() { echo -e "\033[1;32m[SUCCESS]\033[0m" "$1"; }
print_warning() { echo -e "\033[1;33m[WARNING]\033[0m" "$1"; }
print_error() { echo -e "\033[1;31m[ERROR]\033[0m" "$1" >&2; exit 1; }
print_info() { echo -e "\033[1;34m[INFO]\033[0m $1"; }
print_success() { echo -e "\033[1;32m[SUCCESS]\033[0m $1"; }
print_warning() { echo -e "\033[1;33m[WARNING]\033[0m $1"; }
print_error() { echo -e "\033[1;31m[ERROR]\033[0m $1" >&2; exit 1; }
# Install build dependencies for Gtk/GObject if available via the system package manager
install_dependencies() {
print_info "Installing system packages required for Gtk bindings..."
if command -v apt-get &>/dev/null; then
sudo apt-get update && sudo apt-get install -y \
build-essential pkg-config libcairo2 libcairo2-dev \
libgirepository1.0-dev gobject-introspection \
gir1.2-gtk-3.0 libgtk-3-dev python3-dev libffi-dev libssl-dev \
cmake rustc cargo zlib1g-dev libjpeg-dev libpng-dev \
libfreetype6-dev xclip wl-clipboard
sudo apt-get update && sudo apt-get install -y \
build-essential pkg-config libcairo2 libcairo2-dev \
libgirepository1.0-dev gobject-introspection \
gir1.2-gtk-3.0 python3-dev libffi-dev libssl-dev xclip
elif command -v yum &>/dev/null; then
sudo yum install -y @'Development Tools' cairo cairo-devel \
gobject-introspection-devel gtk3-devel python3-devel \
libffi-devel openssl-devel cmake rust cargo zlib-devel \
libjpeg-turbo-devel libpng-devel freetype-devel xclip \
wl-clipboard
sudo yum install -y @'Development Tools' cairo cairo-devel \
gobject-introspection-devel gtk3-devel python3-devel \
libffi-devel openssl-devel xclip
elif command -v dnf &>/dev/null; then
sudo dnf groupinstall -y "Development Tools" && sudo dnf install -y \
cairo cairo-devel gobject-introspection-devel gtk3-devel \
python3-devel libffi-devel openssl-devel cmake rust cargo \
zlib-devel libjpeg-turbo-devel libpng-devel freetype-devel \
xclip wl-clipboard
sudo dnf groupinstall -y "Development Tools" && sudo dnf install -y \
cairo cairo-devel gobject-introspection-devel gtk3-devel \
python3-devel libffi-devel openssl-devel xclip
elif command -v pacman &>/dev/null; then
sudo pacman -Syu --noconfirm base-devel pkgconf cmake rustup \
gtk3 gobject-introspection cairo libjpeg-turbo zlib \
libpng freetype xclip wl-clipboard && rustup default stable
sudo pacman -Syu --noconfirm base-devel pkgconf cairo \
gobject-introspection gtk3 python xclip
elif command -v brew &>/dev/null; then
brew install pkg-config cairo gobject-introspection gtk+3 cmake rustup-init && \
rustup-init -y
brew install pkg-config cairo gobject-introspection gtk+3
else
print_warning "Unsupported package manager. Please install Gtk/GObject dependencies manually."
fi
}
usage() {
echo "Usage: $0 [-b | --branch <branch_name>] [-m | --mode <tui|gui|both>] [-h | --help]"
echo "Usage: $0 [-b | --branch <branch_name>] [-h | --help]"
echo " -b, --branch Specify the git branch to install (default: main)"
echo " -m, --mode Installation mode: tui, gui, both (default: tui)"
echo " -h, --help Display this help message"
exit 0
}
@@ -83,40 +70,12 @@ main() {
-h|--help)
usage
;;
-m|--mode)
if [ -n "$2" ]; then
MODE="$2"
shift 2
else
print_error "Error: --mode requires an argument (tui|gui|both)."
fi
;;
*)
print_error "Unknown parameter passed: $1"; usage
;;
esac
done
case "$MODE" in
tui|gui|both) ;;
*)
print_error "Invalid mode: $MODE. Use 'tui', 'gui', or 'both'."
;;
esac
DISPLAY_DETECTED=false
if [ -n "${DISPLAY:-}" ] || [ -n "${WAYLAND_DISPLAY:-}" ]; then
DISPLAY_DETECTED=true
fi
if [[ "$MODE" == "gui" || "$MODE" == "both" ]]; then
if [ "$DISPLAY_DETECTED" = true ]; then
INSTALL_GUI=true
else
print_warning "No display detected. Skipping GUI installation."
fi
fi
# 1. Detect OS
OS_NAME=$(uname -s)
print_info "Installing SeedPass from branch: '$BRANCH'"
@@ -151,14 +110,12 @@ main() {
fi
# 3. Install OS-specific dependencies
if [ "$INSTALL_GUI" = true ]; then
print_info "Checking for Gtk development libraries..."
if command -v pkg-config &>/dev/null && pkg-config --exists girepository-2.0; then
print_info "Gtk bindings already available."
else
print_warning "Gtk introspection bindings not found. Installing dependencies..."
install_dependencies
fi
print_info "Checking for Gtk development libraries..."
if ! python3 -c "import gi" &>/dev/null; then
print_warning "Gtk introspection bindings not found. Installing dependencies..."
install_dependencies
else
print_info "Gtk bindings already available."
fi
# 4. Clone or update the repository
@@ -182,43 +139,18 @@ main() {
source "$VENV_DIR/bin/activate"
# 6. Install/Update Python dependencies
print_info "Installing/updating Python dependencies from requirements.lock..."
print_info "Installing/updating Python dependencies from src/requirements.txt..."
pip install --upgrade pip
pip install --require-hashes -r requirements.lock
if [ "$INSTALL_GUI" = true ]; then
GUI_READY=true
if [ "$OS_NAME" = "Linux" ]; then
if ! (command -v pkg-config &>/dev/null && pkg-config --exists girepository-2.0); then
print_warning "GTK libraries (girepository-2.0) not found. Install them with: sudo apt install libgirepository1.0-dev"
read -r -p "Continue with GUI installation anyway? (y/N) " CONTINUE_GUI
if [[ ! "$CONTINUE_GUI" =~ ^[Yy]$ ]]; then
GUI_READY=false
fi
fi
fi
if [ "$GUI_READY" = true ]; then
if [ "$OS_NAME" = "Linux" ]; then
print_info "Installing Linux GUI dependencies..."
pip install -e ".[gui-gtk]"
elif [ "$OS_NAME" = "Darwin" ]; then
print_info "Installing macOS GUI dependencies..."
pip install -e ".[gui-mac]"
else
print_warning "Unsupported OS for GUI installation. Installing core package only."
pip install -e .
fi
else
print_warning "Skipping GUI installation."
pip install -e .
fi
else
pip install -e .
pip install -r src/requirements.txt
pip install -e .
print_info "Installing platform-specific Toga backend..."
if [ "$OS_NAME" = "Linux" ]; then
print_info "Installing toga-gtk for Linux..."
pip install toga-gtk
elif [ "$OS_NAME" = "Darwin" ]; then
print_info "Installing toga-cocoa for macOS..."
pip install toga-cocoa
fi
if ! "$VENV_DIR/bin/python" -c "import seedpass.cli; print('ok')"; then
print_error "SeedPass CLI import check failed."
fi
deactivate
# 7. Create launcher script

View File

@@ -34,9 +34,13 @@ def initialize_app() -> None:
"""Ensure the application directory exists."""
try:
APP_DIR.mkdir(exist_ok=True, parents=True)
logger.debug("Application directory created at %s", APP_DIR)
if logger.isEnabledFor(logging.DEBUG):
logger.info(f"Application directory created at {APP_DIR}")
except Exception as exc:
logger.error("Failed to create application directory: %s", exc, exc_info=True)
if logger.isEnabledFor(logging.DEBUG):
logger.error(
f"Failed to create application directory: {exc}", exc_info=True
)
# -----------------------------------

View File

@@ -1,15 +1,17 @@
# bip85/__init__.py
import logging
import traceback
logger = logging.getLogger(__name__)
try:
from .bip85 import BIP85
except Exception as exc:
logger.error("Failed to import BIP85 module: %s", exc, exc_info=True)
raise ImportError(
"BIP85 dependencies are missing. Install 'bip_utils', 'cryptography', and 'colorama'."
) from exc
if logger.isEnabledFor(logging.DEBUG):
logger.info("BIP85 module imported successfully.")
except Exception as e:
if logger.isEnabledFor(logging.DEBUG):
logger.error(f"Failed to import BIP85 module: {e}", exc_info=True)
__all__ = ["BIP85"]

View File

@@ -18,8 +18,7 @@ import hashlib
import hmac
import logging
import os
from typing import Union
import traceback
from colorama import Fore
from bip_utils import Bip32Slip10Secp256k1, Bip39MnemonicGenerator, Bip39Languages
@@ -39,19 +38,13 @@ class Bip85Error(Exception):
class BIP85:
def __init__(self, seed_or_xprv: Union[bytes, str]):
"""Initialize from seed bytes or an ``xprv`` string.
Parameters:
seed_or_xprv (Union[bytes, str]): Either raw BIP39 seed bytes
or a BIP32 extended private key (``xprv``) string.
"""
def __init__(self, seed_bytes: bytes | str):
"""Initialize from BIP39 seed bytes or BIP32 xprv string."""
try:
if isinstance(seed_or_xprv, (bytes, bytearray)):
self.bip32_ctx = Bip32Slip10Secp256k1.FromSeed(seed_or_xprv)
if isinstance(seed_bytes, (bytes, bytearray)):
self.bip32_ctx = Bip32Slip10Secp256k1.FromSeed(seed_bytes)
else:
self.bip32_ctx = Bip32Slip10Secp256k1.FromExtendedKey(seed_or_xprv)
self.bip32_ctx = Bip32Slip10Secp256k1.FromExtendedKey(seed_bytes)
logging.debug("BIP32 context initialized successfully.")
except Exception as e:
logging.error(f"Error initializing BIP32 context: {e}", exc_info=True)
@@ -59,34 +52,26 @@ class BIP85:
raise Bip85Error(f"Error initializing BIP32 context: {e}")
def derive_entropy(
self,
index: int,
entropy_bytes: int,
app_no: int = 39,
word_count: int | None = None,
self, index: int, bytes_len: int, app_no: int = 39, words_len: int | None = None
) -> bytes:
"""Derive entropy using the BIP-85 HMAC-SHA512 method.
"""
Derives entropy using BIP-85 HMAC-SHA512 method.
Parameters:
index (int): Index for the child entropy.
entropy_bytes (int): Number of bytes of entropy to derive.
app_no (int): Application number (default 39 for BIP39).
word_count (int | None): Number of words used in the derivation path
for BIP39. If ``None`` and ``app_no`` is ``39``, ``word_count``
defaults to ``entropy_bytes``. The final segment of the
derivation path becomes ``m/83696968'/39'/0'/word_count'/index'``.
bytes_len (int): Number of bytes to derive for the entropy.
app_no (int): Application number (default 39 for BIP39)
Returns:
bytes: Derived entropy of length ``entropy_bytes``.
bytes: Derived entropy.
Raises:
SystemExit: If derivation fails or the derived entropy length is
invalid.
SystemExit: If derivation fails or entropy length is invalid.
"""
if app_no == 39:
if word_count is None:
word_count = entropy_bytes
path = f"m/83696968'/{app_no}'/0'/{word_count}'/{index}'"
if words_len is None:
words_len = bytes_len
path = f"m/83696968'/{app_no}'/0'/{words_len}'/{index}'"
elif app_no == 32:
path = f"m/83696968'/{app_no}'/{index}'"
else:
@@ -102,17 +87,17 @@ class BIP85:
hmac_result = hmac.new(hmac_key, k, hashlib.sha512).digest()
logging.debug(f"HMAC-SHA512 result: {hmac_result.hex()}")
entropy = hmac_result[:entropy_bytes]
entropy = hmac_result[:bytes_len]
if len(entropy) != entropy_bytes:
if len(entropy) != bytes_len:
logging.error(
f"Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
f"Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
)
print(
f"{Fore.RED}Error: Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
f"{Fore.RED}Error: Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
)
raise Bip85Error(
f"Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
f"Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
)
logging.debug(f"Derived entropy: {entropy.hex()}")
@@ -123,17 +108,14 @@ class BIP85:
raise Bip85Error(f"Error deriving entropy: {e}")
def derive_mnemonic(self, index: int, words_num: int) -> str:
entropy_bytes = {12: 16, 18: 24, 24: 32}.get(words_num)
if not entropy_bytes:
bytes_len = {12: 16, 18: 24, 24: 32}.get(words_num)
if not bytes_len:
logging.error(f"Unsupported number of words: {words_num}")
print(f"{Fore.RED}Error: Unsupported number of words: {words_num}")
raise Bip85Error(f"Unsupported number of words: {words_num}")
entropy = self.derive_entropy(
index=index,
entropy_bytes=entropy_bytes,
app_no=39,
word_count=words_num,
index=index, bytes_len=bytes_len, app_no=39, words_len=words_num
)
try:
mnemonic = Bip39MnemonicGenerator(Bip39Languages.ENGLISH).FromEntropy(
@@ -149,7 +131,7 @@ class BIP85:
def derive_symmetric_key(self, index: int = 0, app_no: int = 2) -> bytes:
"""Derive 32 bytes of entropy for symmetric key usage."""
try:
key = self.derive_entropy(index=index, entropy_bytes=32, app_no=app_no)
key = self.derive_entropy(index=index, bytes_len=32, app_no=app_no)
logging.debug(f"Derived symmetric key: {key.hex()}")
return key
except Exception as e:
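For the BIP-39 application (app_no 39) the derivation path is m/83696968'/39'/0'/word_count'/index', and the mnemonic length maps to 16, 24, or 32 bytes of entropy. A minimal sketch of the HMAC-SHA512 step described above, assuming child_key is the 32-byte private key already derived at that path and using the standard BIP-85 key string:
import hashlib
import hmac

WORDS_TO_BYTES = {12: 16, 18: 24, 24: 32}  # mnemonic words -> entropy bytes

def bip85_entropy(child_key: bytes, words: int) -> bytes:
    # HMAC key is the fixed BIP-85 string; the message is the derived child key.
    digest = hmac.new(b"bip-entropy-from-k", child_key, hashlib.sha512).digest()
    return digest[: WORDS_TO_BYTES[words]]

print(bip85_entropy(b"\x01" * 32, 12).hex())  # 16 bytes -> a 12-word mnemonic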

View File

@@ -12,23 +12,19 @@ import logging
import signal
import time
import argparse
import asyncio
import gzip
import tomli
from tomli import TOMLDecodeError
from colorama import init as colorama_init
from termcolor import colored
from utils.color_scheme import color_text
import importlib
import traceback
from seedpass.core.manager import PasswordManager, restore_backup_index
from seedpass.core.manager import PasswordManager
from nostr.client import NostrClient
from seedpass.core.entry_types import EntryType
from seedpass.core.config_manager import ConfigManager
from constants import INACTIVITY_TIMEOUT, initialize_app
from utils.password_prompt import (
PasswordPromptError,
prompt_existing_password,
prompt_new_password,
)
from utils.password_prompt import PasswordPromptError
from utils import (
timed_input,
copy_to_clipboard,
@@ -36,37 +32,12 @@ from utils import (
pause,
clear_header_with_notification,
)
from utils.clipboard import ClipboardUnavailableError
from utils.atomic_write import atomic_write
from utils.logging_utils import (
ConsolePauseFilter,
ChecksumWarningFilter,
pause_logging_for_ui,
)
import queue
from local_bip85.bip85 import Bip85Error
colorama_init()
OPTIONAL_DEPENDENCIES = {
"pyperclip": "clipboard support for secret mode",
"qrcode": "QR code generation for TOTP setup",
"toga": "desktop GUI features",
}
def _warn_missing_optional_dependencies() -> None:
"""Log warnings for any optional packages that are not installed."""
for module, feature in OPTIONAL_DEPENDENCIES.items():
try:
importlib.import_module(module)
except ModuleNotFoundError:
logging.debug(
"Optional dependency '%s' is not installed; %s will be unavailable.",
module,
feature,
)
def load_global_config() -> dict:
"""Load configuration from ~/.seedpass/config.toml if present."""
@@ -76,44 +47,48 @@ def load_global_config() -> dict:
try:
with open(config_path, "rb") as f:
return tomli.load(f)
except (OSError, TOMLDecodeError) as exc:
except Exception as exc:
logging.warning(f"Failed to read {config_path}: {exc}")
return {}
def configure_logging() -> None:
"""Configure application-wide logging handlers."""
def configure_logging():
logger = logging.getLogger()
logger.setLevel(logging.DEBUG) # Keep this as DEBUG to capture all logs
# Remove all handlers associated with the root logger object
for handler in logger.handlers[:]:
logger.removeHandler(handler)
# Ensure the 'logs' directory exists
log_directory = Path("logs")
log_directory.mkdir(parents=True, exist_ok=True)
if not log_directory.exists():
log_directory.mkdir(parents=True, exist_ok=True)
console_handler = logging.StreamHandler(sys.stderr)
console_handler.setLevel(logging.WARNING)
console_handler.addFilter(ConsolePauseFilter())
console_handler.addFilter(ChecksumWarningFilter())
# Create handlers
c_handler = logging.StreamHandler(sys.stdout)
f_handler = logging.FileHandler(log_directory / "main.log")
file_handler = logging.FileHandler(log_directory / "main.log")
file_handler.setLevel(logging.DEBUG)
# Set levels: only errors and critical messages will be shown in the console
c_handler.setLevel(logging.ERROR)
f_handler.setLevel(logging.DEBUG)
# Create formatters and add them to handlers
formatter = logging.Formatter(
"%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]",
"%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]"
)
console_handler.setFormatter(formatter)
file_handler.setFormatter(formatter)
c_handler.setFormatter(formatter)
f_handler.setFormatter(formatter)
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
root_logger.handlers.clear()
root_logger.addHandler(console_handler)
root_logger.addHandler(file_handler)
# Add handlers to the logger
logger.addHandler(c_handler)
logger.addHandler(f_handler)
logging.captureWarnings(True)
logging.getLogger("monstr").setLevel(logging.ERROR)
logging.getLogger("nostr").setLevel(logging.ERROR)
# Set logging level for third-party libraries to WARNING to suppress their debug logs
logging.getLogger("monstr").setLevel(logging.WARNING)
logging.getLogger("nostr").setLevel(logging.WARNING)
@pause_logging_for_ui
def confirm_action(prompt: str) -> bool:
"""
Prompts the user for confirmation.
@@ -162,7 +137,6 @@ def get_notification_text(pm: PasswordManager) -> str:
return color_text(getattr(note, "message", ""), category)
@pause_logging_for_ui
def handle_switch_fingerprint(password_manager: PasswordManager):
"""
Handles switching the active fingerprint.
@@ -191,13 +165,6 @@ def handle_switch_fingerprint(password_manager: PasswordManager):
return
selected_fingerprint = fingerprints[int(choice) - 1]
if selected_fingerprint == password_manager.current_fingerprint:
print(
colored(
f"Seed profile {selected_fingerprint} is already active.", "yellow"
)
)
return
if password_manager.select_fingerprint(selected_fingerprint):
print(colored(f"Switched to seed profile {selected_fingerprint}.", "green"))
else:
@@ -221,7 +188,11 @@ def handle_add_new_fingerprint(password_manager: PasswordManager):
def handle_remove_fingerprint(password_manager: PasswordManager):
"""Handle removing an existing seed profile."""
"""
Handles removing an existing seed profile.
:param password_manager: An instance of PasswordManager.
"""
try:
fingerprints = password_manager.fingerprint_manager.list_fingerprints()
if not fingerprints:
@@ -240,24 +211,12 @@ def handle_remove_fingerprint(password_manager: PasswordManager):
selected_fingerprint = fingerprints[int(choice) - 1]
confirm = confirm_action(
f"Are you sure you want to remove seed profile {selected_fingerprint}? This will delete all associated data. (Y/N):"
f"Are you sure you want to remove seed profile {selected_fingerprint}? This will delete all associated data. (Y/N): "
)
if confirm:
def _cleanup_and_exit() -> None:
password_manager.current_fingerprint = None
password_manager.is_dirty = False
getattr(password_manager, "cleanup", lambda: None)()
print(colored("All seed profiles removed. Exiting.", "yellow"))
sys.exit(0)
if password_manager.fingerprint_manager.remove_fingerprint(
selected_fingerprint, _cleanup_and_exit
selected_fingerprint
):
password_manager.current_fingerprint = (
password_manager.fingerprint_manager.current_fingerprint
)
password_manager.is_dirty = False
print(
colored(
f"Seed profile {selected_fingerprint} removed successfully.",
@@ -346,33 +305,7 @@ def _display_live_stats(
stats_mgr.reset()
return
# Flush any pending input so an accidental newline doesn't exit immediately
try: # pragma: no cover - depends on platform
import termios
termios.tcflush(sys.stdin, termios.TCIFLUSH)
except Exception:
try: # pragma: no cover - Windows fallback
import msvcrt
while msvcrt.kbhit():
msvcrt.getwch()
except Exception:
pass
while True:
# Break out immediately if the user has already pressed Enter
try: # pragma: no cover - non-interactive environments
import select
ready, _, _ = select.select([sys.stdin], [], [], 0)
if ready:
line = sys.stdin.readline().strip()
if line == "" or line.lower() == "b":
break
except Exception:
pass
if callable(sync_fn):
try:
sync_fn()
@@ -471,36 +404,34 @@ def handle_post_to_nostr(
def handle_retrieve_from_nostr(password_manager: PasswordManager):
"""Retrieve the encrypted password index from Nostr."""
"""
Handles the action of retrieving the encrypted password index from Nostr.
"""
try:
password_manager.sync_index_from_nostr()
if password_manager.nostr_client.last_error:
password_manager.nostr_client.fingerprint = password_manager.current_fingerprint
result = asyncio.run(password_manager.nostr_client.fetch_latest_snapshot())
if result:
manifest, chunks = result
encrypted = gzip.decompress(b"".join(chunks))
if manifest.delta_since:
version = int(manifest.delta_since)
deltas = asyncio.run(
password_manager.nostr_client.fetch_deltas_since(version)
)
if deltas:
encrypted = deltas[-1]
password_manager.encryption_manager.decrypt_and_save_index_from_nostr(
encrypted
)
print(colored("Encrypted index retrieved and saved successfully.", "green"))
logging.info("Encrypted index retrieved and saved successfully from Nostr.")
else:
msg = (
f"No Nostr events found for fingerprint"
f" {password_manager.current_fingerprint}."
if "Snapshot not found" in password_manager.nostr_client.last_error
else password_manager.nostr_client.last_error
)
print(colored(msg, "red"))
logging.error(msg)
else:
try:
legacy_pub = (
password_manager.nostr_client.key_manager.generate_legacy_nostr_keys().public_key_hex()
)
if password_manager.nostr_client.keys.public_key_hex() == legacy_pub:
note = "Restored index from legacy Nostr backup."
print(colored(note, "yellow"))
logging.info(note)
except Exception:
pass
print(
colored(
"Encrypted index retrieved and saved successfully.",
"green",
)
)
logging.info("Encrypted index retrieved and saved successfully from Nostr.")
except Exception as e:
logging.error(f"Failed to retrieve from Nostr: {e}", exc_info=True)
print(colored(f"Error: Failed to retrieve from Nostr: {e}", "red"))
@@ -671,49 +602,33 @@ def handle_set_inactivity_timeout(password_manager: PasswordManager) -> None:
def handle_set_kdf_iterations(password_manager: PasswordManager) -> None:
"""Interactive slider for PBKDF2 iteration strength with benchmarking."""
import hashlib
import time
"""Change the PBKDF2 iteration count."""
cfg_mgr = password_manager.config_manager
if cfg_mgr is None:
print(colored("Configuration manager unavailable.", "red"))
return
levels = [
("1", "Very Fast", 10_000),
("2", "Fast", 50_000),
("3", "Balanced", 100_000),
("4", "Slow", 200_000),
("5", "Paranoid", 500_000),
]
try:
current = cfg_mgr.get_kdf_iterations()
print(colored(f"Current iterations: {current}", "cyan"))
except Exception as e:
logging.error(f"Error loading iterations: {e}")
print(colored(f"Error: {e}", "red"))
return
print(colored(f"Current iterations: {current}", "cyan"))
for key, label, iters in levels:
marker = "*" if iters == current else " "
print(colored(f"{key}. {label} ({iters}) {marker}", "menu"))
print(colored("b. Benchmark current setting", "menu"))
choice = input("Select strength or 'b' to benchmark: ").strip().lower()
if not choice:
print(colored("No change made.", "yellow"))
return
if choice == "b":
start = time.perf_counter()
hashlib.pbkdf2_hmac("sha256", b"bench", b"salt", current)
elapsed = time.perf_counter() - start
print(colored(f"{current} iterations took {elapsed:.2f}s", "green"))
return
selected = {k: v for k, _, v in levels}.get(choice)
if not selected:
print(colored("Invalid choice.", "red"))
value = input("Enter new iteration count: ").strip()
if not value:
print(colored("No iteration count entered.", "yellow"))
return
try:
cfg_mgr.set_kdf_iterations(selected)
print(colored(f"KDF iteration count set to {selected}.", "green"))
iterations = int(value)
if iterations <= 0:
print(colored("Iterations must be positive.", "red"))
return
except ValueError:
print(colored("Invalid number.", "red"))
return
try:
cfg_mgr.set_kdf_iterations(iterations)
print(colored("KDF iteration count updated.", "green"))
except Exception as e:
logging.error(f"Error saving iterations: {e}")
print(colored(f"Error: {e}", "red"))
@@ -752,7 +667,8 @@ def handle_set_additional_backup_location(pm: PasswordManager) -> None:
path = Path(value).expanduser()
path.mkdir(parents=True, exist_ok=True)
test_file = path / ".seedpass_write_test"
atomic_write(test_file, lambda f: f.write("test"))
with open(test_file, "w") as f:
f.write("test")
test_file.unlink()
except Exception as e:
print(colored(f"Path not writable: {e}", "red"))
@@ -791,18 +707,8 @@ def handle_toggle_secret_mode(pm: PasswordManager) -> None:
"""Toggle secret mode and adjust clipboard delay."""
cfg = pm.config_manager
if cfg is None:
vault = getattr(pm, "vault", None)
fingerprint_dir = getattr(pm, "fingerprint_dir", None)
if vault is not None and fingerprint_dir is not None:
try:
cfg = pm.config_manager = ConfigManager(vault, fingerprint_dir)
except Exception as exc:
logging.error(f"Failed to initialize ConfigManager: {exc}")
print(colored("Configuration manager unavailable.", "red"))
return
else:
print(colored("Configuration manager unavailable.", "red"))
return
print(colored("Configuration manager unavailable.", "red"))
return
try:
enabled = cfg.get_secret_mode_enabled()
delay = cfg.get_clipboard_clear_delay()
@@ -842,18 +748,8 @@ def handle_toggle_quick_unlock(pm: PasswordManager) -> None:
"""Enable or disable Quick Unlock."""
cfg = pm.config_manager
if cfg is None:
vault = getattr(pm, "vault", None)
fingerprint_dir = getattr(pm, "fingerprint_dir", None)
if vault is not None and fingerprint_dir is not None:
try:
cfg = pm.config_manager = ConfigManager(vault, fingerprint_dir)
except Exception as exc:
logging.error(f"Failed to initialize ConfigManager: {exc}")
print(colored("Configuration manager unavailable.", "red"))
return
else:
print(colored("Configuration manager unavailable.", "red"))
return
print(colored("Configuration manager unavailable.", "red"))
return
try:
enabled = cfg.get_quick_unlock()
except Exception as exc:
@@ -879,18 +775,8 @@ def handle_toggle_offline_mode(pm: PasswordManager) -> None:
"""Enable or disable offline mode."""
cfg = pm.config_manager
if cfg is None:
vault = getattr(pm, "vault", None)
fingerprint_dir = getattr(pm, "fingerprint_dir", None)
if vault is not None and fingerprint_dir is not None:
try:
cfg = pm.config_manager = ConfigManager(vault, fingerprint_dir)
except Exception as exc:
logging.error(f"Failed to initialize ConfigManager: {exc}")
print(colored("Configuration manager unavailable.", "red"))
return
else:
print(colored("Configuration manager unavailable.", "red"))
return
print(colored("Configuration manager unavailable.", "red"))
return
try:
enabled = cfg.get_offline_mode()
except Exception as exc:
@@ -1031,12 +917,12 @@ def handle_settings(password_manager: PasswordManager) -> None:
print(color_text("8. Import database", "menu"))
print(color_text("9. Export 2FA codes", "menu"))
print(color_text("10. Set additional backup location", "menu"))
print(color_text("11. KDF strength & benchmark", "menu"))
print(color_text("11. Set KDF iterations", "menu"))
print(color_text("12. Set inactivity timeout", "menu"))
print(color_text("13. Lock Vault", "menu"))
print(color_text("14. Stats", "menu"))
print(color_text("15. Toggle Secret Mode", "menu"))
print(color_text("16. Toggle Offline Mode (default ON)", "menu"))
print(color_text("16. Toggle Offline Mode", "menu"))
print(color_text("17. Toggle Quick Unlock", "menu"))
choice = input("Select an option or press Enter to go back: ").strip()
if choice == "1":
@@ -1044,16 +930,7 @@ def handle_settings(password_manager: PasswordManager) -> None:
elif choice == "2":
handle_nostr_menu(password_manager)
elif choice == "3":
try:
old_pw = prompt_existing_password("Enter your current password: ")
new_pw = prompt_new_password()
password_manager.change_password(old_pw, new_pw)
except ValueError:
print(colored("Incorrect password.", "red"))
except PasswordPromptError:
pass
except Exception as e:
print(colored(f"Error: {e}", "red"))
password_manager.change_password()
pause()
elif choice == "4":
password_manager.handle_verify_checksum()
@@ -1131,7 +1008,6 @@ def display_menu(
getattr(password_manager, "start_background_relay_check", lambda: None)()
_display_live_stats(password_manager)
while True:
getattr(password_manager, "poll_background_errors", lambda: None)()
fp, parent_fp, child_fp = getattr(
password_manager,
"header_fingerprint_args",
@@ -1152,15 +1028,11 @@ def display_menu(
getattr(password_manager, "start_background_relay_check", lambda: None)()
continue
# Periodically push updates to Nostr
current_fp = getattr(password_manager, "current_fingerprint", None)
if current_fp:
if (
password_manager.is_dirty
and time.time() - password_manager.last_update >= sync_interval
):
handle_post_to_nostr(password_manager)
password_manager.is_dirty = False
else:
if (
password_manager.is_dirty
and time.time() - password_manager.last_update >= sync_interval
):
handle_post_to_nostr(password_manager)
password_manager.is_dirty = False
# Flush logging handlers
@@ -1294,7 +1166,6 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
Optional seed profile fingerprint to select automatically.
"""
configure_logging()
_warn_missing_optional_dependencies()
initialize_app()
logger = logging.getLogger(__name__)
logger.info("Starting SeedPass Password Manager")
@@ -1302,35 +1173,10 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
load_global_config()
parser = argparse.ArgumentParser()
parser.add_argument("--fingerprint")
parser.add_argument(
"--restore-backup",
help="Restore index from backup file before starting",
)
parser.add_argument(
"--no-clipboard",
action="store_true",
help="Disable clipboard support and print secrets",
)
parser.add_argument(
"--deterministic-totp",
action="store_true",
help="Derive TOTP secrets deterministically",
)
parser.add_argument(
"--max-prompt-attempts",
type=int,
default=None,
help="Maximum number of password/seed prompt attempts (0 to disable)",
)
sub = parser.add_subparsers(dest="command")
exp = sub.add_parser("export")
exp.add_argument("--file")
exp.add_argument(
"--unencrypted",
action="store_true",
help="Export without encryption",
)
imp = sub.add_parser("import")
imp.add_argument("--file")
@@ -1346,44 +1192,6 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
args = parser.parse_args(argv)
if args.restore_backup:
fp_target = args.fingerprint or fingerprint
if fp_target is None:
print(
colored(
"Error: --fingerprint is required when using --restore-backup.",
"red",
)
)
return 1
try:
restore_backup_index(Path(args.restore_backup), fp_target)
logger.info("Restored backup from %s", args.restore_backup)
except Exception as e:
logger.error(f"Failed to restore backup: {e}", exc_info=True)
print(colored(f"Error: Failed to restore backup: {e}", "red"))
return 1
elif args.command is None:
print("Startup Options:")
print("1. Continue")
print("2. Restore from backup")
choice = input("Select an option: ").strip()
if choice == "2":
path = input("Enter backup file path: ").strip()
fp_target = args.fingerprint or fingerprint
if fp_target is None:
fp_target = input("Enter fingerprint for restore: ").strip()
try:
restore_backup_index(Path(path), fp_target)
logger.info("Restored backup from %s", path)
except Exception as e:
logger.error(f"Failed to restore backup: {e}", exc_info=True)
print(colored(f"Error: Failed to restore backup: {e}", "red"))
return 1
if args.max_prompt_attempts is not None:
os.environ["SEEDPASS_MAX_PROMPT_ATTEMPTS"] = str(args.max_prompt_attempts)
try:
password_manager = PasswordManager(fingerprint=args.fingerprint or fingerprint)
logger.info("PasswordManager initialized successfully.")
@@ -1396,15 +1204,8 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
print(colored(f"Error: Failed to initialize PasswordManager: {e}", "red"))
return 1
if args.no_clipboard:
password_manager.secret_mode_enabled = False
if args.deterministic_totp:
password_manager.deterministic_totp = True
if args.command == "export":
password_manager.handle_export_database(
Path(args.file), encrypt=not args.unencrypted
)
password_manager.handle_export_database(Path(args.file))
return 0
if args.command == "import":
password_manager.handle_import_database(Path(args.file))
@@ -1446,22 +1247,15 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
if entry.get("type") != EntryType.TOTP.value:
print(colored("Entry is not a TOTP entry.", "red"))
return 1
key = getattr(password_manager, "KEY_TOTP_DET", None) or getattr(
password_manager, "parent_seed", None
code = password_manager.entry_manager.get_totp_code(
idx, password_manager.parent_seed
)
code = password_manager.entry_manager.get_totp_code(idx, key)
print(code)
try:
if copy_to_clipboard(code, password_manager.clipboard_clear_delay):
print(colored("Code copied to clipboard", "green"))
except ClipboardUnavailableError as exc:
print(
colored(
f"Clipboard unavailable: {exc}\n"
"Re-run with '--no-clipboard' to print codes instead.",
"yellow",
)
)
copy_to_clipboard(code, password_manager.clipboard_clear_delay)
print(colored("Code copied to clipboard", "green"))
except Exception as exc:
logging.warning(f"Clipboard copy failed: {exc}")
return 0
def signal_handler(sig, _frame):

View File

@@ -25,4 +25,3 @@ class Manifest:
algo: str
chunks: List[ChunkMeta]
delta_since: Optional[int] = None
nonce: Optional[str] = None
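The manifest is the small metadata record that ties a snapshot together: format version, compression algorithm, one ChunkMeta per chunk (identifier, size, SHA-256 hash, relay event id), and an optional delta watermark. A minimal sketch of the same shape built by hand, without importing the project dataclasses:
import gzip
import hashlib

payload = gzip.compress(b"encrypted vault bytes")
chunk_meta = {
    "id": "seedpass-chunk-0000",
    "size": len(payload),
    "hash": hashlib.sha256(payload).hexdigest(),
    "event_id": None,
}
manifest = {"ver": 1, "algo": "gzip", "chunks": [chunk_meta], "delta_since": None}
print(manifest)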

View File

@@ -1,39 +1,36 @@
import asyncio
# src/nostr/client.py
import base64
import json
import logging
import time
from typing import List, Optional, Tuple, TYPE_CHECKING
import hashlib
import asyncio
import gzip
import threading
from datetime import timedelta
from typing import List, Optional, TYPE_CHECKING
import websockets
# Imports from the nostr-sdk library
from nostr_sdk import (
Client,
Keys,
NostrSigner,
EventBuilder,
Filter,
Kind,
KindStandard,
NostrSigner,
Tag,
RelayUrl,
PublicKey,
)
from nostr_sdk import EventId, Keys, Timestamp
from datetime import timedelta
from nostr_sdk import EventId, Timestamp
from constants import MAX_RETRIES, RETRY_DELAY
from seedpass.core.encryption import EncryptionManager
from .backup_models import (
ChunkMeta,
KIND_DELTA,
KIND_MANIFEST,
KIND_SNAPSHOT_CHUNK,
Manifest,
)
from .connection import ConnectionHandler, DEFAULT_RELAYS
from .key_manager import KeyManager as SeedPassKeyManager
from .snapshot import SnapshotHandler, prepare_snapshot
from .backup_models import Manifest, ChunkMeta, KIND_MANIFEST, KIND_SNAPSHOT_CHUNK
from seedpass.core.encryption import EncryptionManager
from constants import MAX_RETRIES, RETRY_DELAY
from utils.file_lock import exclusive_lock
if TYPE_CHECKING: # pragma: no cover - imported for type hints
from seedpass.core.config_manager import ConfigManager
@@ -45,8 +42,56 @@ ClientBuilder = Client
logger = logging.getLogger(__name__)
logger.setLevel(logging.WARNING)
DEFAULT_RELAYS = [
"wss://relay.snort.social",
"wss://nostr.oxtr.dev",
"wss://relay.primal.net",
]
class NostrClient(ConnectionHandler, SnapshotHandler):
# Identifier prefix for replaceable manifest events
MANIFEST_ID_PREFIX = "seedpass-manifest-"
def prepare_snapshot(
encrypted_bytes: bytes, limit: int
) -> Tuple[Manifest, list[bytes]]:
"""Compress and split the encrypted vault into chunks.
Each chunk is hashed with SHA-256 and described in the returned
:class:`Manifest`.
Parameters
----------
encrypted_bytes : bytes
The encrypted vault contents.
limit : int
Maximum chunk size in bytes.
Returns
-------
Tuple[Manifest, list[bytes]]
The manifest describing all chunks and the list of chunk bytes.
"""
compressed = gzip.compress(encrypted_bytes)
chunks = [compressed[i : i + limit] for i in range(0, len(compressed), limit)]
metas: list[ChunkMeta] = []
for i, chunk in enumerate(chunks):
metas.append(
ChunkMeta(
id=f"seedpass-chunk-{i:04d}",
size=len(chunk),
hash=hashlib.sha256(chunk).hexdigest(),
event_id=None,
)
)
manifest = Manifest(ver=1, algo="gzip", chunks=metas)
return manifest, chunks
class NostrClient:
"""Interact with the Nostr network using nostr-sdk."""
def __init__(
@@ -57,8 +102,6 @@ class NostrClient(ConnectionHandler, SnapshotHandler):
parent_seed: Optional[str] = None,
offline_mode: bool = False,
config_manager: Optional["ConfigManager"] = None,
key_index: bytes | None = None,
account_index: int | None = None,
) -> None:
self.encryption_manager = encryption_manager
self.fingerprint = fingerprint
@@ -70,7 +113,7 @@ class NostrClient(ConnectionHandler, SnapshotHandler):
parent_seed = self.encryption_manager.decrypt_parent_seed()
# Use our project's KeyManager to derive the private key
self.key_manager = KeyManager(parent_seed, fingerprint, account_index)
self.key_manager = KeyManager(parent_seed, fingerprint)
# Create a nostr-sdk Keys object from our derived private key
private_key_hex = self.key_manager.keys.private_key_hex()
@@ -101,7 +144,6 @@ class NostrClient(ConnectionHandler, SnapshotHandler):
self.current_manifest: Manifest | None = None
self.current_manifest_id: str | None = None
self._delta_events: list[str] = []
self.key_index = key_index or b""
# Configure and initialize the nostr-sdk Client
signer = NostrSigner.keys(self.keys)
@@ -109,9 +151,540 @@ class NostrClient(ConnectionHandler, SnapshotHandler):
self._connected = False
def connect(self) -> None:
"""Connect the client to all configured relays."""
if self.offline_mode or not self.relays:
return
if not self._connected:
self.initialize_client_pool()
__all__ = [
"NostrClient",
"prepare_snapshot",
"DEFAULT_RELAYS",
]
def initialize_client_pool(self) -> None:
"""Add relays to the client and connect."""
if self.offline_mode or not self.relays:
return
asyncio.run(self._initialize_client_pool())
async def _connect_async(self) -> None:
"""Ensure the client is connected within an async context."""
if self.offline_mode or not self.relays:
return
if not self._connected:
await self._initialize_client_pool()
async def _initialize_client_pool(self) -> None:
if self.offline_mode or not self.relays:
return
formatted = []
for relay in self.relays:
if isinstance(relay, str):
try:
formatted.append(RelayUrl.parse(relay))
except Exception:
logger.error("Invalid relay URL: %s", relay)
else:
formatted.append(relay)
if hasattr(self.client, "add_relays"):
await self.client.add_relays(formatted)
else:
for relay in formatted:
await self.client.add_relay(relay)
await self.client.connect()
self._connected = True
logger.info("NostrClient connected to relays: %s", formatted)
async def _ping_relay(self, relay: str, timeout: float) -> bool:
"""Attempt to retrieve the latest event from a single relay."""
sub_id = "seedpass-health"
pubkey = self.keys.public_key().to_hex()
req = json.dumps(
["REQ", sub_id, {"kinds": [1], "authors": [pubkey], "limit": 1}]
)
try:
async with websockets.connect(
relay, open_timeout=timeout, close_timeout=timeout
) as ws:
await ws.send(req)
while True:
msg = await asyncio.wait_for(ws.recv(), timeout=timeout)
data = json.loads(msg)
if data[0] in {"EVENT", "EOSE"}:
return True
except Exception:
return False
async def _check_relay_health(self, min_relays: int, timeout: float) -> int:
tasks = [self._ping_relay(r, timeout) for r in self.relays]
results = await asyncio.gather(*tasks, return_exceptions=True)
healthy = sum(1 for r in results if r is True)
if healthy < min_relays:
logger.warning(
"Only %s relays responded with data; consider adding more.", healthy
)
return healthy
def check_relay_health(self, min_relays: int = 2, timeout: float = 5.0) -> int:
"""Ping relays and return the count of those providing data."""
if self.offline_mode or not self.relays:
return 0
return asyncio.run(self._check_relay_health(min_relays, timeout))
def publish_json_to_nostr(
self,
encrypted_json: bytes,
to_pubkey: str | None = None,
alt_summary: str | None = None,
) -> str | None:
"""Builds and publishes a Kind 1 text note or direct message.
Parameters
----------
encrypted_json : bytes
The encrypted index data to publish.
to_pubkey : str | None, optional
If provided, send as a direct message to this public key.
alt_summary : str | None, optional
If provided, include an ``alt`` tag so uploads can be
associated with a specific event like a password change.
"""
if self.offline_mode or not self.relays:
return None
self.connect()
self.last_error = None
try:
content = base64.b64encode(encrypted_json).decode("utf-8")
if to_pubkey:
receiver = PublicKey.parse(to_pubkey)
event_output = self.client.send_private_msg_to(
self.relays, receiver, content
)
else:
builder = EventBuilder.text_note(content)
if alt_summary:
builder = builder.tags([Tag.alt(alt_summary)])
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
event_output = self.publish_event(event)
event_id_hex = (
event_output.id.to_hex()
if hasattr(event_output, "id")
else str(event_output)
)
logger.info(f"Successfully published event with ID: {event_id_hex}")
return event_id_hex
except Exception as e:
self.last_error = str(e)
logger.error(f"Failed to publish JSON to Nostr: {e}")
return None
def publish_event(self, event):
"""Publish a prepared event to the configured relays."""
if self.offline_mode or not self.relays:
return None
self.connect()
return asyncio.run(self._publish_event(event))
async def _publish_event(self, event):
if self.offline_mode or not self.relays:
return None
await self._connect_async()
return await self.client.send_event(event)
def update_relays(self, new_relays: List[str]) -> None:
"""Reconnect the client using a new set of relays."""
self.close_client_pool()
self.relays = new_relays
signer = NostrSigner.keys(self.keys)
self.client = Client(signer)
self._connected = False
# Immediately reconnect using the updated relay list
self.initialize_client_pool()
def retrieve_json_from_nostr_sync(
self, retries: int | None = None, delay: float | None = None
) -> Optional[bytes]:
"""Retrieve the latest Kind 1 event from the author with optional retries."""
if self.offline_mode or not self.relays:
return None
if retries is None or delay is None:
if self.config_manager is None:
from seedpass.core.config_manager import ConfigManager
from seedpass.core.vault import Vault
cfg_mgr = ConfigManager(
Vault(self.encryption_manager, self.fingerprint_dir),
self.fingerprint_dir,
)
else:
cfg_mgr = self.config_manager
cfg = cfg_mgr.load_config(require_pin=False)
retries = int(cfg.get("nostr_max_retries", MAX_RETRIES))
delay = float(cfg.get("nostr_retry_delay", RETRY_DELAY))
self.connect()
self.last_error = None
for attempt in range(retries):
try:
result = asyncio.run(self._retrieve_json_from_nostr())
if result is not None:
return result
except Exception as e:
self.last_error = str(e)
logger.error("Failed to retrieve events from Nostr: %s", e)
if attempt < retries - 1:
sleep_time = delay * (2**attempt)
time.sleep(sleep_time)
return None
async def _retrieve_json_from_nostr(self) -> Optional[bytes]:
if self.offline_mode or not self.relays:
return None
await self._connect_async()
# Filter for the latest text note (Kind 1) from our public key
pubkey = self.keys.public_key()
f = Filter().author(pubkey).kind(Kind.from_std(KindStandard.TEXT_NOTE)).limit(1)
timeout = timedelta(seconds=10)
events = (await self.client.fetch_events(f, timeout)).to_vec()
if not events:
self.last_error = "No events found on relays for this user."
logger.warning(self.last_error)
return None
latest_event = events[0]
content_b64 = latest_event.content()
if content_b64:
return base64.b64decode(content_b64.encode("utf-8"))
self.last_error = "Latest event contained no content"
return None
async def publish_snapshot(
self, encrypted_bytes: bytes, limit: int = 50_000
) -> tuple[Manifest, str]:
"""Publish a compressed snapshot split into chunks.
Parameters
----------
encrypted_bytes : bytes
Vault contents already encrypted with the user's key.
limit : int, optional
Maximum chunk size in bytes. Defaults to 50 kB.
"""
start = time.perf_counter()
if self.offline_mode or not self.relays:
return Manifest(ver=1, algo="gzip", chunks=[]), ""
await self.ensure_manifest_is_current()
await self._connect_async()
manifest, chunks = prepare_snapshot(encrypted_bytes, limit)
for meta, chunk in zip(manifest.chunks, chunks):
content = base64.b64encode(chunk).decode("utf-8")
builder = EventBuilder(Kind(KIND_SNAPSHOT_CHUNK), content).tags(
[Tag.identifier(meta.id)]
)
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
result = await self.client.send_event(event)
try:
meta.event_id = (
result.id.to_hex() if hasattr(result, "id") else str(result)
)
except Exception:
meta.event_id = None
manifest_json = json.dumps(
{
"ver": manifest.ver,
"algo": manifest.algo,
"chunks": [meta.__dict__ for meta in manifest.chunks],
"delta_since": manifest.delta_since,
}
)
manifest_identifier = f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
manifest_event = (
EventBuilder(Kind(KIND_MANIFEST), manifest_json)
.tags([Tag.identifier(manifest_identifier)])
.build(self.keys.public_key())
.sign_with_keys(self.keys)
)
await self.client.send_event(manifest_event)
with self._state_lock:
self.current_manifest = manifest
self.current_manifest_id = manifest_identifier
# Record when this snapshot was published for future delta events
self.current_manifest.delta_since = int(time.time())
self._delta_events = []
if getattr(self, "verbose_timing", False):
duration = time.perf_counter() - start
logger.info("publish_snapshot completed in %.2f seconds", duration)
return manifest, manifest_identifier
async def _fetch_chunks_with_retry(
self, manifest_event
) -> tuple[Manifest, list[bytes]] | None:
"""Retrieve all chunks referenced by ``manifest_event`` with retries."""
pubkey = self.keys.public_key()
timeout = timedelta(seconds=10)
try:
data = json.loads(manifest_event.content())
manifest = Manifest(
ver=data["ver"],
algo=data["algo"],
chunks=[ChunkMeta(**c) for c in data["chunks"]],
delta_since=(
int(data["delta_since"])
if data.get("delta_since") is not None
else None
),
)
except Exception:
return None
if self.config_manager is None:
from seedpass.core.config_manager import ConfigManager
from seedpass.core.vault import Vault
cfg_mgr = ConfigManager(
Vault(self.encryption_manager, self.fingerprint_dir),
self.fingerprint_dir,
)
else:
cfg_mgr = self.config_manager
cfg = cfg_mgr.load_config(require_pin=False)
max_retries = int(cfg.get("nostr_max_retries", MAX_RETRIES))
delay = float(cfg.get("nostr_retry_delay", RETRY_DELAY))
chunks: list[bytes] = []
for meta in manifest.chunks:
chunk_bytes: bytes | None = None
for attempt in range(max_retries):
cf = Filter().author(pubkey).kind(Kind(KIND_SNAPSHOT_CHUNK))
if meta.event_id:
cf = cf.id(EventId.parse(meta.event_id))
else:
cf = cf.identifier(meta.id)
cf = cf.limit(1)
cev = (await self.client.fetch_events(cf, timeout)).to_vec()
if cev:
candidate = base64.b64decode(cev[0].content().encode("utf-8"))
if hashlib.sha256(candidate).hexdigest() == meta.hash:
chunk_bytes = candidate
break
if attempt < max_retries - 1:
await asyncio.sleep(delay * (2**attempt))
if chunk_bytes is None:
return None
chunks.append(chunk_bytes)
ident = None
try:
tags_obj = manifest_event.tags()
ident = tags_obj.identifier()
except Exception:
tags = getattr(manifest_event, "tags", None)
if callable(tags):
tags = tags()
if tags:
tag = tags[0]
if hasattr(tag, "as_vec"):
vec = tag.as_vec()
if vec and len(vec) >= 2:
ident = vec[1]
elif isinstance(tag, (list, tuple)) and len(tag) >= 2:
ident = tag[1]
elif isinstance(tag, str):
ident = tag
with self._state_lock:
self.current_manifest = manifest
self.current_manifest_id = ident
return manifest, chunks
async def fetch_latest_snapshot(self) -> Tuple[Manifest, list[bytes]] | None:
"""Retrieve the latest manifest and all snapshot chunks."""
if self.offline_mode or not self.relays:
return None
await self._connect_async()
self.last_error = None
pubkey = self.keys.public_key()
ident = f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
f = Filter().author(pubkey).kind(Kind(KIND_MANIFEST)).identifier(ident).limit(1)
timeout = timedelta(seconds=10)
try:
events = (await self.client.fetch_events(f, timeout)).to_vec()
except Exception as e: # pragma: no cover - network errors
self.last_error = str(e)
logger.error(
"Failed to fetch manifest from relays %s: %s",
self.relays,
e,
)
return None
if not events:
return None
for manifest_event in events:
try:
result = await self._fetch_chunks_with_retry(manifest_event)
if result is not None:
return result
except Exception as e: # pragma: no cover - network errors
self.last_error = str(e)
logger.error(
"Error retrieving snapshot from relays %s: %s",
self.relays,
e,
)
if self.last_error is None:
self.last_error = "Snapshot not found on relays"
return None
async def ensure_manifest_is_current(self) -> None:
"""Verify the local manifest is up to date before publishing."""
if self.offline_mode or not self.relays:
return
await self._connect_async()
pubkey = self.keys.public_key()
ident = f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
f = Filter().author(pubkey).kind(Kind(KIND_MANIFEST)).identifier(ident).limit(1)
timeout = timedelta(seconds=10)
try:
events = (await self.client.fetch_events(f, timeout)).to_vec()
except Exception:
return
if not events:
return
try:
data = json.loads(events[0].content())
remote = data.get("delta_since")
if remote is not None:
remote = int(remote)
except Exception:
return
with self._state_lock:
local = self.current_manifest.delta_since if self.current_manifest else None
if remote is not None and (local is None or remote > local):
self.last_error = "Manifest out of date"
raise RuntimeError("Manifest out of date")
async def publish_delta(self, delta_bytes: bytes, manifest_id: str) -> str:
"""Publish a delta event referencing a manifest."""
if self.offline_mode or not self.relays:
return ""
await self.ensure_manifest_is_current()
await self._connect_async()
content = base64.b64encode(delta_bytes).decode("utf-8")
tag = Tag.event(EventId.parse(manifest_id))
builder = EventBuilder(Kind(KIND_DELTA), content).tags([tag])
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
result = await self.client.send_event(event)
delta_id = result.id.to_hex() if hasattr(result, "id") else str(result)
created_at = getattr(
event, "created_at", getattr(event, "timestamp", int(time.time()))
)
if hasattr(created_at, "secs"):
created_at = created_at.secs
manifest_event = None
with self._state_lock:
if self.current_manifest is not None:
self.current_manifest.delta_since = int(created_at)
manifest_json = json.dumps(
{
"ver": self.current_manifest.ver,
"algo": self.current_manifest.algo,
"chunks": [
meta.__dict__ for meta in self.current_manifest.chunks
],
"delta_since": self.current_manifest.delta_since,
}
)
manifest_event = (
EventBuilder(Kind(KIND_MANIFEST), manifest_json)
.tags([Tag.identifier(self.current_manifest_id)])
.build(self.keys.public_key())
.sign_with_keys(self.keys)
)
self._delta_events.append(delta_id)
if manifest_event is not None:
await self.client.send_event(manifest_event)
return delta_id
async def fetch_deltas_since(self, version: int) -> list[bytes]:
"""Retrieve delta events newer than the given version."""
if self.offline_mode or not self.relays:
return []
await self._connect_async()
pubkey = self.keys.public_key()
f = (
Filter()
.author(pubkey)
.kind(Kind(KIND_DELTA))
.since(Timestamp.from_secs(version))
)
timeout = timedelta(seconds=10)
events = (await self.client.fetch_events(f, timeout)).to_vec()
events.sort(
key=lambda ev: getattr(ev, "created_at", getattr(ev, "timestamp", 0))
)
deltas: list[bytes] = []
for ev in events:
deltas.append(base64.b64decode(ev.content().encode("utf-8")))
manifest = self.get_current_manifest()
if manifest is not None:
snap_size = sum(c.size for c in manifest.chunks)
if (
len(deltas) >= self.delta_threshold
or sum(len(d) for d in deltas) > snap_size
):
# Publish a new snapshot to consolidate deltas
joined = b"".join(deltas)
await self.publish_snapshot(joined)
exp = Timestamp.from_secs(int(time.time()))
for ev in events:
exp_builder = EventBuilder(Kind(KIND_DELTA), ev.content()).tags(
[Tag.expiration(exp)]
)
exp_event = exp_builder.build(
self.keys.public_key()
).sign_with_keys(self.keys)
await self.client.send_event(exp_event)
return deltas
def get_current_manifest(self) -> Manifest | None:
"""Thread-safe access to ``current_manifest``."""
with self._state_lock:
return self.current_manifest
def get_current_manifest_id(self) -> str | None:
"""Thread-safe access to ``current_manifest_id``."""
with self._state_lock:
return self.current_manifest_id
def get_delta_events(self) -> list[str]:
"""Thread-safe snapshot of pending delta event IDs."""
with self._state_lock:
return list(self._delta_events)
def close_client_pool(self) -> None:
"""Disconnects the client from all relays."""
try:
asyncio.run(self.client.disconnect())
self._connected = False
logger.info("NostrClient disconnected from relays.")
except Exception as e:
logger.error("Error during NostrClient shutdown: %s", e)

View File

@@ -27,8 +27,7 @@ class Keys:
@staticmethod
def hex_to_bech32(key_str: str, prefix: str = "npub") -> str:
# Pad to align with 5-bit groups as expected for Bech32 encoding
data = convertbits(bytes.fromhex(key_str), 8, 5, True)
data = convertbits(bytes.fromhex(key_str), 8, 5)
return bech32_encode(prefix, data)
@staticmethod
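hex_to_bech32 regroups the 32-byte key from 8-bit bytes into 5-bit words and Bech32-encodes it under an npub/nsec prefix; the change above is whether convertbits pads the final group. A minimal sketch, assuming the helpers come from the pinned bech32 package:
from bech32 import bech32_encode, convertbits

key_hex = "11" * 32  # arbitrary 32-byte key for illustration
data = convertbits(bytes.fromhex(key_hex), 8, 5, True)  # pad to whole 5-bit groups
print(bech32_encode("npub", data))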

View File

@@ -1,232 +0,0 @@
import asyncio
import base64
import json
import logging
from datetime import timedelta
from typing import List, Optional
import websockets
from . import client as nostr_client
from constants import MAX_RETRIES, RETRY_DELAY
logger = logging.getLogger("nostr.client")
logger.setLevel(logging.WARNING)
DEFAULT_RELAYS = [
"wss://relay.snort.social",
"wss://nostr.oxtr.dev",
"wss://relay.primal.net",
]
class ConnectionHandler:
"""Mixin providing relay connection and retry logic."""
async def connect(self) -> None:
"""Connect the client to all configured relays."""
if self.offline_mode or not self.relays:
return
if not getattr(self, "_connected", False):
await self._initialize_client_pool()
def initialize_client_pool(self) -> None:
"""Add relays to the client and connect."""
if self.offline_mode or not self.relays:
return
asyncio.run(self._initialize_client_pool())
async def _connect_async(self) -> None:
"""Ensure the client is connected within an async context."""
if self.offline_mode or not self.relays:
return
if not getattr(self, "_connected", False):
await self._initialize_client_pool()
async def _initialize_client_pool(self) -> None:
if self.offline_mode or not self.relays:
return
formatted = []
for relay in self.relays:
if isinstance(relay, str):
try:
formatted.append(nostr_client.RelayUrl.parse(relay))
except Exception:
logger.error("Invalid relay URL: %s", relay)
else:
formatted.append(relay)
if hasattr(self.client, "add_relays"):
await self.client.add_relays(formatted)
else:
for relay in formatted:
await self.client.add_relay(relay)
await self.client.connect()
self._connected = True
logger.info("NostrClient connected to relays: %s", formatted)
async def _ping_relay(self, relay: str, timeout: float) -> bool:
"""Attempt to retrieve the latest event from a single relay."""
sub_id = "seedpass-health"
pubkey = self.keys.public_key().to_hex()
req = json.dumps(
[
"REQ",
sub_id,
{"kinds": [1], "authors": [pubkey], "limit": 1},
]
)
try:
async with websockets.connect(
relay, open_timeout=timeout, close_timeout=timeout
) as ws:
await ws.send(req)
while True:
msg = await asyncio.wait_for(ws.recv(), timeout=timeout)
data = json.loads(msg)
if data[0] in {"EVENT", "EOSE"}:
return True
except Exception:
return False
async def _check_relay_health(self, min_relays: int, timeout: float) -> int:
tasks = [self._ping_relay(r, timeout) for r in self.relays]
results = await asyncio.gather(*tasks, return_exceptions=True)
healthy = sum(1 for r in results if r is True)
if healthy < min_relays:
logger.warning(
"Only %s relays responded with data; consider adding more.", healthy
)
return healthy
def check_relay_health(self, min_relays: int = 2, timeout: float = 5.0) -> int:
"""Ping relays and return the count of those providing data."""
if self.offline_mode or not self.relays:
return 0
return asyncio.run(self._check_relay_health(min_relays, timeout))
async def publish_json_to_nostr(
self,
encrypted_json: bytes,
to_pubkey: str | None = None,
alt_summary: str | None = None,
) -> str | None:
"""Build and publish a Kind 1 text note or direct message."""
if self.offline_mode or not self.relays:
return None
await self.connect()
self.last_error = None
try:
content = base64.b64encode(encrypted_json).decode("utf-8")
if to_pubkey:
receiver = nostr_client.PublicKey.parse(to_pubkey)
event_output = self.client.send_private_msg_to(
self.relays, receiver, content
)
else:
builder = nostr_client.EventBuilder.text_note(content)
if alt_summary:
builder = builder.tags([nostr_client.Tag.alt(alt_summary)])
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
event_output = await self.publish_event(event)
event_id_hex = (
event_output.id.to_hex()
if hasattr(event_output, "id")
else str(event_output)
)
logger.info("Successfully published event with ID: %s", event_id_hex)
return event_id_hex
except Exception as e:
self.last_error = str(e)
logger.error("Failed to publish JSON to Nostr: %s", e)
return None
async def publish_event(self, event):
"""Publish a prepared event to the configured relays."""
if self.offline_mode or not self.relays:
return None
await self.connect()
return await self.client.send_event(event)
def update_relays(self, new_relays: List[str]) -> None:
"""Reconnect the client using a new set of relays."""
self.close_client_pool()
self.relays = new_relays
signer = nostr_client.NostrSigner.keys(self.keys)
self.client = nostr_client.Client(signer)
self._connected = False
self.initialize_client_pool()
async def retrieve_json_from_nostr(
self, retries: int | None = None, delay: float | None = None
) -> Optional[bytes]:
"""Retrieve the latest Kind 1 event from the author with optional retries."""
if self.offline_mode or not self.relays:
return None
if retries is None or delay is None:
if self.config_manager is None:
from seedpass.core.config_manager import ConfigManager
from seedpass.core.vault import Vault
cfg_mgr = ConfigManager(
Vault(self.encryption_manager, self.fingerprint_dir),
self.fingerprint_dir,
)
else:
cfg_mgr = self.config_manager
cfg = cfg_mgr.load_config(require_pin=False)
retries = int(cfg.get("nostr_max_retries", MAX_RETRIES))
delay = float(cfg.get("nostr_retry_delay", RETRY_DELAY))
await self.connect()
self.last_error = None
for attempt in range(retries):
try:
result = await self._retrieve_json_from_nostr()
if result is not None:
return result
except Exception as e:
self.last_error = str(e)
logger.error("Failed to retrieve events from Nostr: %s", e)
if attempt < retries - 1:
sleep_time = delay * (2**attempt)
await asyncio.sleep(sleep_time)
return None
async def _retrieve_json_from_nostr(self) -> Optional[bytes]:
if self.offline_mode or not self.relays:
return None
await self._connect_async()
pubkey = self.keys.public_key()
f = (
nostr_client.Filter()
.author(pubkey)
.kind(nostr_client.Kind.from_std(nostr_client.KindStandard.TEXT_NOTE))
.limit(1)
)
timeout = timedelta(seconds=10)
events = (await self.client.fetch_events(f, timeout)).to_vec()
if not events:
self.last_error = "No events found on relays for this user."
logger.warning(self.last_error)
return None
latest_event = events[0]
content_b64 = latest_event.content()
if content_b64:
return base64.b64decode(content_b64.encode("utf-8"))
self.last_error = "Latest event contained no content"
return None
def close_client_pool(self) -> None:
"""Disconnect the client from all relays."""
try:
asyncio.run(self.client.disconnect())
self._connected = False
logger.info("NostrClient disconnected from relays.")
except Exception as e:
logger.error("Error during NostrClient shutdown: %s", e)

View File

@@ -2,8 +2,16 @@
import time
import logging
import traceback
from nostr_sdk import Event
try:
from monstr.event.event import Event
except ImportError: # pragma: no cover - optional dependency
class Event: # minimal placeholder for type hints when monstr is absent
id: str
created_at: int
content: str
# Instantiate the logger
@@ -19,15 +27,26 @@ class EventHandler:
pass # Initialize if needed
def handle_new_event(self, evt: Event):
"""Process and log details from a Nostr event."""
"""
Processes incoming events by logging their details.
:param evt: The received Event object.
"""
try:
created_at = evt.created_at().as_secs()
created_at_str = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(created_at))
event_id = evt.id().to_hex()
# Assuming evt.created_at is always an integer Unix timestamp
if isinstance(evt.created_at, int):
created_at_str = time.strftime(
"%Y-%m-%d %H:%M:%S", time.gmtime(evt.created_at)
)
else:
# Handle unexpected types gracefully
created_at_str = str(evt.created_at)
# Log the event details without extra newlines
logger.info(
f"[New Event] ID: {event_id} | Created At: {created_at_str} | Content: {evt.content()}"
f"[New Event] ID: {evt.id} | Created At: {created_at_str} | Content: {evt.content}"
)
except Exception as e:
logger.error(f"Error handling new event: {e}", exc_info=True)
# Optionally, handle the exception without re-raising
# For example, continue processing other events
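The handler above renders `created_at` as a UTC timestamp before logging. A one-line check of that conversion (the epoch value is an arbitrary example):
import time

created_at = 1735689600  # example Unix timestamp
print(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(created_at)))  # "2025-01-01 00:00:00"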

View File

@@ -2,36 +2,28 @@
import hashlib
import logging
import traceback
from bech32 import bech32_encode, convertbits
from local_bip85.bip85 import BIP85
from bip_utils import Bip39SeedGenerator
from .coincurve_keys import Keys
# BIP-85 application numbers for Nostr key derivation
NOSTR_KEY_APP_ID = 1237
LEGACY_NOSTR_KEY_APP_ID = 0
logger = logging.getLogger(__name__)
class KeyManager:
"""Manages key generation, encoding, and derivation for ``NostrClient``."""
"""
Manages key generation, encoding, and derivation for NostrClient.
"""
def __init__(
self, parent_seed: str, fingerprint: str, account_index: int | None = None
):
"""Initialize the key manager.
def __init__(self, parent_seed: str, fingerprint: str):
"""
Initializes the KeyManager with the provided parent_seed and fingerprint.
Parameters
----------
parent_seed:
The BIP-39 seed used as the root for derivations.
fingerprint:
Seed profile fingerprint used for legacy derivations and logging.
account_index:
Optional explicit index for BIP-85 Nostr key derivation. When ``None``
the index defaults to ``0``.
Parameters:
parent_seed (str): The parent seed used for key derivation.
fingerprint (str): The fingerprint to differentiate key derivations.
"""
try:
if not isinstance(parent_seed, str):
@@ -45,15 +37,12 @@ class KeyManager:
self.parent_seed = parent_seed
self.fingerprint = fingerprint
self.account_index = account_index
logger.debug(
"KeyManager initialized with parent_seed, fingerprint and account index."
)
logger.debug(f"KeyManager initialized with parent_seed and fingerprint.")
# Initialize BIP85
self.bip85 = self.initialize_bip85()
# Generate Nostr keys using the provided account index
# Generate Nostr keys using the fingerprint
self.keys = self.generate_nostr_keys()
logger.debug("Nostr Keys initialized successfully.")
@@ -78,45 +67,31 @@ class KeyManager:
raise
def generate_nostr_keys(self) -> Keys:
"""Derive a Nostr key pair using the configured ``account_index``."""
try:
index = self.account_index if self.account_index is not None else 0
entropy_bytes = self.bip85.derive_entropy(
index=index, entropy_bytes=32, app_no=NOSTR_KEY_APP_ID
)
private_key_hex = entropy_bytes.hex()
keys = Keys(priv_k=private_key_hex)
logger.debug("Nostr keys generated for account index %s", index)
return keys
except Exception as e:
logger.error(f"Failed to generate Nostr keys: {e}", exc_info=True)
raise
def generate_v1_nostr_keys(self) -> Keys:
"""Derive keys using the legacy fingerprint-hash method."""
"""
Derives a unique Nostr key pair for the given fingerprint using BIP-85.
Returns:
Keys: An instance of Keys containing the Nostr key pair.
"""
try:
# Convert fingerprint to an integer index (using a hash function)
index = int(hashlib.sha256(self.fingerprint.encode()).hexdigest(), 16) % (
2**31
)
entropy_bytes = self.bip85.derive_entropy(
index=index, entropy_bytes=32, app_no=NOSTR_KEY_APP_ID
)
return Keys(priv_k=entropy_bytes.hex())
except Exception as e:
logger.error(f"Failed to generate v1 Nostr keys: {e}", exc_info=True)
raise
def generate_legacy_nostr_keys(self) -> Keys:
"""Derive Nostr keys using the legacy application ID."""
try:
entropy = self.bip85.derive_entropy(
index=0, entropy_bytes=32, app_no=LEGACY_NOSTR_KEY_APP_ID
# Derive entropy for Nostr key (32 bytes)
entropy_bytes = self.bip85.derive_entropy(
index=index,
bytes_len=32, # Adjust parameter name and value as per your method signature
)
return Keys(priv_k=entropy.hex())
# Generate Nostr key pair from entropy
private_key_hex = entropy_bytes.hex()
keys = Keys(priv_k=private_key_hex)
logger.debug(f"Nostr keys generated for fingerprint {self.fingerprint}.")
return keys
except Exception as e:
logger.error(f"Failed to generate legacy Nostr keys: {e}", exc_info=True)
logger.error(f"Failed to generate Nostr keys: {e}", exc_info=True)
raise
def get_public_key_hex(self) -> str:
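The legacy (v1) derivation above turns the profile fingerprint into a BIP-85 index by hashing it and reducing modulo 2^31. A standalone sketch of that index calculation (the fingerprint string is a made-up example):
import hashlib

fingerprint = "A1B2C3D4"  # example fingerprint, not from a real profile
index = int(hashlib.sha256(fingerprint.encode()).hexdigest(), 16) % (2**31)
print(index)  # deterministic 31-bit index derived from the fingerprint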

View File

@@ -0,0 +1,41 @@
# nostr/logging_config.py
import logging
import os
# Comment out or remove the configure_logging function to avoid conflicts
# def configure_logging():
# """
# Configures logging with both file and console handlers.
# Logs include the timestamp, log level, message, filename, and line number.
# Only ERROR and higher-level messages are shown in the terminal, while all messages
# are logged in the log file.
# """
# logger = logging.getLogger()
# logger.setLevel(logging.DEBUG) # Set root logger to DEBUG
#
# # Prevent adding multiple handlers if configure_logging is called multiple times
# if not logger.handlers:
# # Create the 'logs' folder if it doesn't exist
# log_directory = 'logs'
# if not os.path.exists(log_directory):
# os.makedirs(log_directory)
#
# # Create handlers
# c_handler = logging.StreamHandler()
# f_handler = logging.FileHandler(os.path.join(log_directory, 'app.log'))
#
# # Set levels: only errors and critical messages will be shown in the console
# c_handler.setLevel(logging.ERROR)
# f_handler.setLevel(logging.DEBUG)
#
# # Create formatters and add them to handlers, include file and line number in log messages
# formatter = logging.Formatter(
# '%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]'
# )
# c_handler.setFormatter(formatter)
# f_handler.setFormatter(formatter)
#
# # Add handlers to the logger
# logger.addHandler(c_handler)
# logger.addHandler(f_handler)
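The commented-out block above describes a dual-handler setup: everything goes to a log file while only errors reach the console. If it were re-enabled, a compact equivalent would look roughly like this; the directory name and format string are taken from the comments above, and this is a sketch rather than the project's actual configuration:
import logging
import os

def configure_logging() -> logging.Logger:
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    if not logger.handlers:  # avoid adding handlers twice
        os.makedirs("logs", exist_ok=True)
        console = logging.StreamHandler()
        console.setLevel(logging.ERROR)  # terminal: errors and above only
        logfile = logging.FileHandler(os.path.join("logs", "app.log"))
        logfile.setLevel(logging.DEBUG)  # file: everything
        fmt = logging.Formatter(
            "%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]"
        )
        console.setFormatter(fmt)
        logfile.setFormatter(fmt)
        logger.addHandler(console)
        logger.addHandler(logfile)
    return logger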

View File

@@ -1,443 +0,0 @@
import asyncio
import base64
import gzip
import hashlib
import hmac
import json
import logging
import os
import time
from datetime import timedelta
from typing import Tuple
from . import client as nostr_client
from constants import MAX_RETRIES, RETRY_DELAY
from .backup_models import (
ChunkMeta,
Manifest,
KIND_DELTA,
KIND_MANIFEST,
KIND_SNAPSHOT_CHUNK,
)
logger = logging.getLogger("nostr.client")
logger.setLevel(logging.WARNING)
def prepare_snapshot(
encrypted_bytes: bytes, limit: int
) -> Tuple[Manifest, list[bytes]]:
"""Compress and split the encrypted vault into chunks."""
compressed = gzip.compress(encrypted_bytes)
chunks = [compressed[i : i + limit] for i in range(0, len(compressed), limit)]
metas: list[ChunkMeta] = []
for i, chunk in enumerate(chunks):
metas.append(
ChunkMeta(
id=f"seedpass-chunk-{i:04d}",
size=len(chunk),
hash=hashlib.sha256(chunk).hexdigest(),
event_id=None,
)
)
manifest = Manifest(ver=1, algo="gzip", chunks=metas)
return manifest, chunks
def new_manifest_id(key_index: bytes) -> tuple[str, bytes]:
"""Return a new manifest identifier and nonce.
The identifier is computed as HMAC-SHA256 of ``b"manifest|" + nonce``
using ``key_index`` as the HMAC key. The nonce is returned so it can be
embedded inside the manifest itself.
"""
nonce = os.urandom(16)
digest = hmac.new(key_index, b"manifest|" + nonce, hashlib.sha256).hexdigest()
return digest, nonce
class SnapshotHandler:
"""Mixin providing chunk and manifest handling."""
async def publish_snapshot(
self, encrypted_bytes: bytes, limit: int = 50_000
) -> tuple[Manifest, str]:
start = time.perf_counter()
if self.offline_mode or not self.relays:
return Manifest(ver=1, algo="gzip", chunks=[]), ""
await self.ensure_manifest_is_current()
await self._connect_async()
manifest, chunks = prepare_snapshot(encrypted_bytes, limit)
existing: dict[str, str] = {}
if self.current_manifest:
for old in self.current_manifest.chunks:
if old.hash and old.event_id:
existing[old.hash] = old.event_id
for meta, chunk in zip(manifest.chunks, chunks):
cached_id = existing.get(meta.hash)
if cached_id:
meta.event_id = cached_id
continue
content = base64.b64encode(chunk).decode("utf-8")
builder = nostr_client.EventBuilder(
nostr_client.Kind(KIND_SNAPSHOT_CHUNK), content
).tags([nostr_client.Tag.identifier(meta.id)])
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
result = await self.client.send_event(event)
try:
meta.event_id = (
result.id.to_hex() if hasattr(result, "id") else str(result)
)
except Exception:
meta.event_id = None
if (
self.current_manifest_id
and self.current_manifest
and getattr(self.current_manifest, "nonce", None)
):
manifest_id = self.current_manifest_id
manifest.nonce = self.current_manifest.nonce
else:
manifest_id, nonce = new_manifest_id(self.key_index)
manifest.nonce = base64.b64encode(nonce).decode("utf-8")
manifest_json = json.dumps(
{
"ver": manifest.ver,
"algo": manifest.algo,
"chunks": [meta.__dict__ for meta in manifest.chunks],
"delta_since": manifest.delta_since,
"nonce": manifest.nonce,
}
)
manifest_event = (
nostr_client.EventBuilder(nostr_client.Kind(KIND_MANIFEST), manifest_json)
.tags([nostr_client.Tag.identifier(manifest_id)])
.build(self.keys.public_key())
.sign_with_keys(self.keys)
)
await self.client.send_event(manifest_event)
with self._state_lock:
self.current_manifest = manifest
self.current_manifest_id = manifest_id
self.current_manifest.delta_since = int(time.time())
self._delta_events = []
if getattr(self, "verbose_timing", False):
duration = time.perf_counter() - start
logger.info("publish_snapshot completed in %.2f seconds", duration)
return manifest, manifest_id
async def _fetch_chunks_with_retry(
self, manifest_event
) -> tuple[Manifest, list[bytes]] | None:
pubkey = self.keys.public_key()
timeout = timedelta(seconds=10)
try:
data = json.loads(manifest_event.content())
manifest = Manifest(
ver=data["ver"],
algo=data["algo"],
chunks=[ChunkMeta(**c) for c in data["chunks"]],
delta_since=(
int(data["delta_since"])
if data.get("delta_since") is not None
else None
),
nonce=data.get("nonce"),
)
except Exception:
return None
if self.config_manager is None:
from seedpass.core.config_manager import ConfigManager
from seedpass.core.vault import Vault
cfg_mgr = ConfigManager(
Vault(self.encryption_manager, self.fingerprint_dir),
self.fingerprint_dir,
)
else:
cfg_mgr = self.config_manager
cfg = cfg_mgr.load_config(require_pin=False)
max_retries = int(cfg.get("nostr_max_retries", MAX_RETRIES))
delay = float(cfg.get("nostr_retry_delay", RETRY_DELAY))
chunks: list[bytes] = []
for meta in manifest.chunks:
chunk_bytes: bytes | None = None
for attempt in range(max_retries):
cf = (
nostr_client.Filter()
.author(pubkey)
.kind(nostr_client.Kind(KIND_SNAPSHOT_CHUNK))
)
if meta.event_id:
cf = cf.id(nostr_client.EventId.parse(meta.event_id))
else:
cf = cf.identifier(meta.id)
cf = cf.limit(1)
cev = (await self.client.fetch_events(cf, timeout)).to_vec()
if cev:
candidate = base64.b64decode(cev[0].content().encode("utf-8"))
if hashlib.sha256(candidate).hexdigest() == meta.hash:
chunk_bytes = candidate
break
if attempt < max_retries - 1:
await asyncio.sleep(delay * (2**attempt))
if chunk_bytes is None:
return None
chunks.append(chunk_bytes)
ident = None
try:
tags_obj = manifest_event.tags()
ident = tags_obj.identifier()
except Exception:
tags = getattr(manifest_event, "tags", None)
if callable(tags):
tags = tags()
if tags:
tag = tags[0]
if hasattr(tag, "as_vec"):
vec = tag.as_vec()
if vec and len(vec) >= 2:
ident = vec[1]
elif isinstance(tag, (list, tuple)) and len(tag) >= 2:
ident = tag[1]
elif isinstance(tag, str):
ident = tag
with self._state_lock:
self.current_manifest = manifest
self.current_manifest_id = ident
return manifest, chunks
async def _fetch_manifest_with_keys(
self, keys_obj: nostr_client.Keys
) -> tuple[Manifest, list[bytes]] | None:
"""Retrieve the manifest and chunks using ``keys_obj``."""
self.keys = keys_obj
pubkey = self.keys.public_key()
timeout = timedelta(seconds=10)
ident = self.current_manifest_id
f = nostr_client.Filter().author(pubkey).kind(nostr_client.Kind(KIND_MANIFEST))
if ident:
f = f.identifier(ident)
f = f.limit(1)
try:
events = (await self.client.fetch_events(f, timeout)).to_vec()
except Exception as e: # pragma: no cover - network errors
self.last_error = str(e)
logger.error(
"Failed to fetch manifest from relays %s: %s",
self.relays,
e,
)
return None
if not events and ident:
f = (
nostr_client.Filter()
.author(pubkey)
.kind(nostr_client.Kind(KIND_MANIFEST))
.limit(1)
)
try:
events = (await self.client.fetch_events(f, timeout)).to_vec()
except Exception as e: # pragma: no cover - network errors
self.last_error = str(e)
logger.error(
"Failed to fetch manifest from relays %s: %s",
self.relays,
e,
)
return None
if not events:
return None
for manifest_event in events:
try:
result = await self._fetch_chunks_with_retry(manifest_event)
if result is not None:
return result
except Exception as e: # pragma: no cover - network errors
self.last_error = str(e)
logger.error(
"Error retrieving snapshot from relays %s: %s",
self.relays,
e,
)
return None
async def fetch_latest_snapshot(self) -> Tuple[Manifest, list[bytes]] | None:
"""Retrieve the latest manifest and all snapshot chunks."""
if self.offline_mode or not self.relays:
return None
await self._connect_async()
self.last_error = None
logger.debug("Searching for backup with current keys...")
try:
primary_keys = nostr_client.Keys.parse(
self.key_manager.keys.private_key_hex()
)
except Exception:
primary_keys = self.keys
result = await self._fetch_manifest_with_keys(primary_keys)
if result is not None:
return result
logger.warning(
"No backup found with current keys. Falling back to legacy key derivation..."
)
try:
legacy_keys = self.key_manager.generate_legacy_nostr_keys()
legacy_sdk_keys = nostr_client.Keys.parse(legacy_keys.private_key_hex())
except Exception as e:
self.last_error = str(e)
return None
result = await self._fetch_manifest_with_keys(legacy_sdk_keys)
if result is not None:
logger.info("Found legacy backup with old key derivation.")
return result
if self.last_error is None:
self.last_error = "No backup found on Nostr relays."
return None
async def ensure_manifest_is_current(self) -> None:
"""Verify the local manifest is up to date before publishing."""
if self.offline_mode or not self.relays:
return
await self._connect_async()
pubkey = self.keys.public_key()
ident = self.current_manifest_id
if ident is None:
return
f = (
nostr_client.Filter()
.author(pubkey)
.kind(nostr_client.Kind(KIND_MANIFEST))
.identifier(ident)
.limit(1)
)
timeout = timedelta(seconds=10)
try:
events = (await self.client.fetch_events(f, timeout)).to_vec()
except Exception:
return
if not events:
return
try:
data = json.loads(events[0].content())
remote = data.get("delta_since")
if remote is not None:
remote = int(remote)
except Exception:
return
with self._state_lock:
local = self.current_manifest.delta_since if self.current_manifest else None
if remote is not None and (local is None or remote > local):
self.last_error = "Manifest out of date"
raise RuntimeError("Manifest out of date")
async def publish_delta(self, delta_bytes: bytes, manifest_id: str) -> str:
if self.offline_mode or not self.relays:
return ""
await self.ensure_manifest_is_current()
await self._connect_async()
content = base64.b64encode(delta_bytes).decode("utf-8")
tag = nostr_client.Tag.event(nostr_client.EventId.parse(manifest_id))
builder = nostr_client.EventBuilder(
nostr_client.Kind(KIND_DELTA), content
).tags([tag])
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
result = await self.client.send_event(event)
delta_id = result.id.to_hex() if hasattr(result, "id") else str(result)
created_at = getattr(
event, "created_at", getattr(event, "timestamp", int(time.time()))
)
if hasattr(created_at, "secs"):
created_at = created_at.secs
manifest_event = None
with self._state_lock:
if self.current_manifest is not None:
self.current_manifest.delta_since = int(created_at)
manifest_json = json.dumps(
{
"ver": self.current_manifest.ver,
"algo": self.current_manifest.algo,
"chunks": [
meta.__dict__ for meta in self.current_manifest.chunks
],
"delta_since": self.current_manifest.delta_since,
"nonce": self.current_manifest.nonce,
}
)
manifest_event = (
nostr_client.EventBuilder(
nostr_client.Kind(KIND_MANIFEST), manifest_json
)
.tags([nostr_client.Tag.identifier(self.current_manifest_id)])
.build(self.keys.public_key())
.sign_with_keys(self.keys)
)
self._delta_events.append(delta_id)
if manifest_event is not None:
await self.client.send_event(manifest_event)
return delta_id
async def fetch_deltas_since(self, version: int) -> list[bytes]:
if self.offline_mode or not self.relays:
return []
await self._connect_async()
pubkey = self.keys.public_key()
f = (
nostr_client.Filter()
.author(pubkey)
.kind(nostr_client.Kind(KIND_DELTA))
.since(nostr_client.Timestamp.from_secs(version))
)
timeout = timedelta(seconds=10)
events = (await self.client.fetch_events(f, timeout)).to_vec()
events.sort(
key=lambda ev: getattr(ev, "created_at", getattr(ev, "timestamp", 0))
)
deltas: list[bytes] = []
for ev in events:
deltas.append(base64.b64decode(ev.content().encode("utf-8")))
manifest = self.get_current_manifest()
if manifest is not None:
snap_size = sum(c.size for c in manifest.chunks)
if (
len(deltas) >= self.delta_threshold
or sum(len(d) for d in deltas) > snap_size
):
joined = b"".join(deltas)
await self.publish_snapshot(joined)
exp = nostr_client.Timestamp.from_secs(int(time.time()))
for ev in events:
exp_builder = nostr_client.EventBuilder(
nostr_client.Kind(KIND_DELTA), ev.content()
).tags([nostr_client.Tag.expiration(exp)])
exp_event = exp_builder.build(
self.keys.public_key()
).sign_with_keys(self.keys)
await self.client.send_event(exp_event)
return deltas
def get_current_manifest(self) -> Manifest | None:
with self._state_lock:
return self.current_manifest
def get_current_manifest_id(self) -> str | None:
with self._state_lock:
return self.current_manifest_id
def get_delta_events(self) -> list[str]:
with self._state_lock:
return list(self._delta_events)
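Two building blocks in the module above are easy to exercise in isolation: `prepare_snapshot` gzips the encrypted vault and splits it into fixed-size chunks with per-chunk SHA-256 hashes, and `new_manifest_id` derives an identifier as HMAC-SHA256 over b"manifest|" plus a random nonce. A minimal round-trip sketch of the same ideas (the payload and HMAC key are placeholder bytes):
import gzip
import hashlib
import hmac
import os

payload = b"encrypted vault bytes" * 100  # placeholder for the encrypted vault
limit = 512

# Chunking, as in prepare_snapshot
compressed = gzip.compress(payload)
chunks = [compressed[i : i + limit] for i in range(0, len(compressed), limit)]
hashes = [hashlib.sha256(c).hexdigest() for c in chunks]

# Reassembly check: joining the chunks and decompressing restores the payload
assert gzip.decompress(b"".join(chunks)) == payload

# Manifest identifier, as in new_manifest_id
key_index = os.urandom(32)  # placeholder HMAC key
nonce = os.urandom(16)
manifest_id = hmac.new(key_index, b"manifest|" + nonce, hashlib.sha256).hexdigest()
print(len(chunks), manifest_id[:16])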

src/nostr/utils.py (new file, 8 lines)
View File

@@ -0,0 +1,8 @@
# nostr/utils.py
import logging
# Example utility function (if any specific to nostr package)
def some_helper_function():
pass # Implement as needed

View File

@@ -28,6 +28,7 @@ Generated on: 2025-04-06
├── encryption_manager.py
├── event_handler.py
├── key_manager.py
├── logging_config.py
├── utils.py
├── utils/
├── __init__.py
@@ -3081,6 +3082,52 @@ __all__ = ['NostrClient']
```
## nostr/logging_config.py
```python
# nostr/logging_config.py
import logging
import os
# Comment out or remove the configure_logging function to avoid conflicts
# def configure_logging():
# """
# Configures logging with both file and console handlers.
# Logs include the timestamp, log level, message, filename, and line number.
# Only ERROR and higher-level messages are shown in the terminal, while all messages
# are logged in the log file.
# """
# logger = logging.getLogger()
# logger.setLevel(logging.DEBUG) # Set root logger to DEBUG
#
# # Prevent adding multiple handlers if configure_logging is called multiple times
# if not logger.handlers:
# # Create the 'logs' folder if it doesn't exist
# log_directory = 'logs'
# if not os.path.exists(log_directory):
# os.makedirs(log_directory)
#
# # Create handlers
# c_handler = logging.StreamHandler()
# f_handler = logging.FileHandler(os.path.join(log_directory, 'app.log'))
#
# # Set levels: only errors and critical messages will be shown in the console
# c_handler.setLevel(logging.ERROR)
# f_handler.setLevel(logging.DEBUG)
#
# # Create formatters and add them to handlers, include file and line number in log messages
# formatter = logging.Formatter(
# '%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]'
# )
# c_handler.setFormatter(formatter)
# f_handler.setFormatter(formatter)
#
# # Add handlers to the logger
# logger.addHandler(c_handler)
# logger.addHandler(f_handler)
```
## nostr/event_handler.py
```python
# nostr/event_handler.py

View File

@@ -1,42 +1,38 @@
colorama>=0.4.6,<1
termcolor>=1.1.0,<4
cryptography>=40.0.2,<46
bip-utils>=2.5.0,<3
bech32>=1.2,<2
coincurve>=18.0.0,<22
mnemonic>=0.21,<1
aiohttp>=3.9,<4
bcrypt>=4,<5
pytest>=7,<9
pytest-cov>=4,<7
pytest-xdist>=3,<4
portalocker>=2.8,<4
nostr-sdk>=0.43,<1
websocket-client>=1.7,<2
colorama>=0.4.6
termcolor>=1.1.0
cryptography>=40.0.2
bip-utils>=2.5.0
bech32==1.2.0
coincurve>=18.0.0
mnemonic
aiohttp>=3.12.14
bcrypt
pytest>=7.0
pytest-cov
pytest-xdist
portalocker>=2.8
nostr-sdk>=0.43
websocket-client==1.7.0
websockets>=15,<16
tomli>=2,<3
hypothesis>=6,<7
mutmut>=2.4.4,<4
pgpy>=0.6,<1
pyotp>=2.8,<3
websockets>=15.0.0
tomli
hypothesis
mutmut==2.4.4
pgpy==0.6.0
pyotp>=2.8.0
freezegun>=1.5.4,<2
typer>=0.12.3,<1
# Optional dependencies - install as needed for additional features
pyperclip>=1.9,<2 # Clipboard support for secret mode
qrcode>=8.2,<9 # Generate QR codes for TOTP setup
fastapi>=0.110,<1 # API server
uvicorn>=0.29,<1 # API server
starlette>=0.47.2,<1 # API server
httpx>=0.28.1,<1 # API server
requests>=2.32,<3 # API server
python-multipart>=0.0.20,<0.1 # API server file uploads
PyJWT>=2.10.1,<3 # JWT authentication for API server
orjson>=3.11.1,<4 # Fast JSON serialization for API server
argon2-cffi>=21,<26 # Password hashing for API server
toga-core>=0.5.2,<1 # Desktop GUI
pillow>=11.3,<12 # Image support for GUI
toga-dummy>=0.5.2,<1 # Headless GUI tests
slowapi>=0.1.9,<1 # Rate limiting for API server
freezegun
pyperclip
qrcode>=8.2
typer>=0.12.3
fastapi>=0.116.1
uvicorn>=0.35.0
starlette>=0.47.2
httpx>=0.28.1
requests>=2.32
python-multipart
orjson
argon2-cffi
toga-core>=0.5.2
pillow
toga-dummy>=0.5.2 # for headless GUI tests

View File

@@ -1,35 +1,30 @@
# Runtime dependencies for vendoring/packaging only
# Generated from requirements.txt with all test-only packages removed
colorama>=0.4.6,<1
termcolor>=1.1.0,<4
cryptography>=40.0.2,<46
bip-utils>=2.5.0,<3
bech32>=1.2,<2
coincurve>=18.0.0,<22
mnemonic>=0.21,<1
aiohttp>=3.9,<4
bcrypt>=4,<5
portalocker>=2.8,<4
nostr-sdk>=0.43,<1
websocket-client>=1.7,<2
colorama>=0.4.6
termcolor>=1.1.0
cryptography>=40.0.2
bip-utils>=2.5.0
bech32==1.2.0
coincurve>=18.0.0
mnemonic
aiohttp>=3.12.14
bcrypt
portalocker>=2.8
nostr-sdk>=0.43
websocket-client==1.7.0
websockets>=15,<16
tomli>=2,<3
pgpy>=0.6,<1
pyotp>=2.8,<3
pyperclip>=1.9,<2
qrcode>=8.2,<9
typer>=0.12.3,<1
fastapi>=0.110,<1
uvicorn>=0.29,<1
starlette>=0.47.2,<1
httpx>=0.28.1,<1
requests>=2.32,<3
python-multipart>=0.0.20,<0.1
PyJWT>=2.10.1,<3
orjson>=3.11.1,<4
argon2-cffi>=21,<26
toga-core>=0.5.2,<1
pillow>=11.3,<12
toga-dummy>=0.5.2,<1
slowapi>=0.1.9,<1
websockets>=15.0.0
tomli
pgpy==0.6.0
pyotp>=2.8.0
pyperclip
qrcode>=8.2
typer>=0.12.3
fastapi>=0.116.0
uvicorn>=0.35.0
httpx>=0.28.1
requests>=2.32
python-multipart
orjson
argon2-cffi
toga-core>=0.5.2

View File

@@ -3,3 +3,4 @@ selected_directories:
- utils/
- nostr/
- local_bip85/
- password_manager/

View File

@@ -1,3 +0,0 @@
"""SeedPass package initialization."""
# Optionally re-export selected symbols here.

View File

@@ -9,84 +9,56 @@ import secrets
import queue
from typing import Any, List, Optional
import logging
from fastapi import FastAPI, Header, HTTPException, Request, Response
from fastapi.concurrency import run_in_threadpool
import asyncio
import sys
from fastapi.middleware.cors import CORSMiddleware
import bcrypt
from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from slowapi.util import get_remote_address
from slowapi.middleware import SlowAPIMiddleware
from seedpass.core.manager import PasswordManager
from seedpass.core.entry_types import EntryType
from seedpass.core.api import UtilityService
_RATE_LIMIT = int(os.getenv("SEEDPASS_RATE_LIMIT", "100"))
_RATE_WINDOW = int(os.getenv("SEEDPASS_RATE_WINDOW", "60"))
_RATE_LIMIT_STR = f"{_RATE_LIMIT}/{_RATE_WINDOW} seconds"
limiter = Limiter(key_func=get_remote_address, default_limits=[_RATE_LIMIT_STR])
app = FastAPI()
logger = logging.getLogger(__name__)
_pm: Optional[PasswordManager] = None
_token: str = ""
def _get_pm(request: Request) -> PasswordManager:
pm = getattr(request.app.state, "pm", None)
assert pm is not None
return pm
def _check_token(request: Request, auth: str | None) -> None:
if auth is None or not auth.startswith("Bearer "):
raise HTTPException(status_code=401, detail="Unauthorized")
token = auth.split(" ", 1)[1].encode()
token_hash = getattr(request.app.state, "token_hash", b"")
if not token_hash or not bcrypt.checkpw(token, token_hash):
def _check_token(auth: str | None) -> None:
if auth != f"Bearer {_token}":
raise HTTPException(status_code=401, detail="Unauthorized")
def _reload_relays(request: Request, relays: list[str]) -> None:
def _reload_relays(relays: list[str]) -> None:
"""Reload the Nostr client with a new relay list."""
pm = _get_pm(request)
assert _pm is not None
try:
pm.nostr_client.close_client_pool()
except (OSError, RuntimeError, ValueError) as exc:
logger.warning("Failed to close NostrClient pool: %s", exc)
_pm.nostr_client.close_client_pool()
except Exception:
pass
try:
pm.nostr_client.relays = relays
pm.nostr_client.initialize_client_pool()
except (OSError, RuntimeError, ValueError) as exc:
logger.error("Failed to initialize NostrClient with relays %s: %s", relays, exc)
_pm.nostr_client.relays = relays
_pm.nostr_client.initialize_client_pool()
except Exception:
pass
def start_server(fingerprint: str | None = None) -> str:
"""Initialize global state and return a random API token.
"""Initialize global state and return the API token.
Parameters
----------
fingerprint:
Optional seed profile fingerprint to select before starting the server.
"""
global _pm, _token
if fingerprint is None:
pm = PasswordManager()
_pm = PasswordManager()
else:
pm = PasswordManager(fingerprint=fingerprint)
app.state.pm = pm
raw_token = secrets.token_urlsafe(32)
app.state.token_hash = bcrypt.hashpw(raw_token.encode(), bcrypt.gensalt())
if not getattr(app.state, "limiter", None):
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
app.add_middleware(SlowAPIMiddleware)
_pm = PasswordManager(fingerprint=fingerprint)
_token = secrets.token_urlsafe(16)
print(f"API token: {_token}")
origins = [
o.strip()
for o in os.getenv("SEEDPASS_CORS_ORIGINS", "").split(",")
@@ -99,35 +71,14 @@ def start_server(fingerprint: str | None = None) -> str:
allow_methods=["*"],
allow_headers=["*"],
)
return raw_token
def _require_password(request: Request, password: str | None) -> None:
pm = _get_pm(request)
if password is None or not pm.verify_password(password):
raise HTTPException(status_code=401, detail="Invalid password")
def _validate_encryption_path(request: Request, path: Path) -> Path:
"""Validate and normalize ``path`` within the active fingerprint directory.
Returns the resolved absolute path if validation succeeds.
"""
pm = _get_pm(request)
try:
return pm.encryption_manager.resolve_relative_path(path)
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
return _token
@app.get("/api/v1/entry")
async def search_entry(
request: Request, query: str, authorization: str | None = Header(None)
) -> List[Any]:
_check_token(request, authorization)
pm = _get_pm(request)
results = await run_in_threadpool(pm.entry_manager.search_entries, query)
def search_entry(query: str, authorization: str | None = Header(None)) -> List[Any]:
_check_token(authorization)
assert _pm is not None
results = _pm.entry_manager.search_entries(query)
return [
{
"id": idx,
@@ -142,24 +93,17 @@ async def search_entry(
@app.get("/api/v1/entry/{entry_id}")
async def get_entry(
request: Request,
entry_id: int,
authorization: str | None = Header(None),
password: str | None = Header(None, alias="X-SeedPass-Password"),
) -> Any:
_check_token(request, authorization)
_require_password(request, password)
pm = _get_pm(request)
entry = await run_in_threadpool(pm.entry_manager.retrieve_entry, entry_id)
def get_entry(entry_id: int, authorization: str | None = Header(None)) -> Any:
_check_token(authorization)
assert _pm is not None
entry = _pm.entry_manager.retrieve_entry(entry_id)
if entry is None:
raise HTTPException(status_code=404, detail="Not found")
return entry
@app.post("/api/v1/entry")
async def create_entry(
request: Request,
def create_entry(
entry: dict,
authorization: str | None = Header(None),
) -> dict[str, Any]:
@@ -169,8 +113,8 @@ async def create_entry(
on, the corresponding entry type is created. When omitted or set to
``password`` the behaviour matches the legacy password-entry API.
"""
_check_token(request, authorization)
pm = _get_pm(request)
_check_token(authorization)
assert _pm is not None
etype = (entry.get("type") or entry.get("kind") or "password").lower()
@@ -186,9 +130,7 @@ async def create_entry(
"min_special",
]
kwargs = {k: entry.get(k) for k in policy_keys if entry.get(k) is not None}
index = await run_in_threadpool(
pm.entry_manager.add_entry,
index = _pm.entry_manager.add_entry(
entry.get("label"),
int(entry.get("length", 12)),
entry.get("username"),
@@ -198,27 +140,23 @@ async def create_entry(
return {"id": index}
if etype == "totp":
index = await run_in_threadpool(pm.entry_manager.get_next_index)
uri = await run_in_threadpool(
pm.entry_manager.add_totp,
index = _pm.entry_manager.get_next_index()
uri = _pm.entry_manager.add_totp(
entry.get("label"),
pm.KEY_TOTP_DET if entry.get("deterministic", False) else None,
_pm.parent_seed,
secret=entry.get("secret"),
index=entry.get("index"),
period=int(entry.get("period", 30)),
digits=int(entry.get("digits", 6)),
notes=entry.get("notes", ""),
archived=entry.get("archived", False),
deterministic=entry.get("deterministic", False),
)
return {"id": index, "uri": uri}
if etype == "ssh":
index = await run_in_threadpool(
pm.entry_manager.add_ssh_key,
index = _pm.entry_manager.add_ssh_key(
entry.get("label"),
pm.parent_seed,
_pm.parent_seed,
index=entry.get("index"),
notes=entry.get("notes", ""),
archived=entry.get("archived", False),
@@ -226,10 +164,9 @@ async def create_entry(
return {"id": index}
if etype == "pgp":
index = await run_in_threadpool(
pm.entry_manager.add_pgp_key,
index = _pm.entry_manager.add_pgp_key(
entry.get("label"),
pm.parent_seed,
_pm.parent_seed,
index=entry.get("index"),
key_type=entry.get("key_type", "ed25519"),
user_id=entry.get("user_id", ""),
@@ -239,10 +176,9 @@ async def create_entry(
return {"id": index}
if etype == "nostr":
index = await run_in_threadpool(
pm.entry_manager.add_nostr_key,
index = _pm.entry_manager.add_nostr_key(
entry.get("label"),
pm.parent_seed,
_pm.parent_seed,
index=entry.get("index"),
notes=entry.get("notes", ""),
archived=entry.get("archived", False),
@@ -250,8 +186,7 @@ async def create_entry(
return {"id": index}
if etype == "key_value":
index = await run_in_threadpool(
pm.entry_manager.add_key_value,
index = _pm.entry_manager.add_key_value(
entry.get("label"),
entry.get("key"),
entry.get("value"),
@@ -261,14 +196,13 @@ async def create_entry(
if etype in {"seed", "managed_account"}:
func = (
pm.entry_manager.add_seed
_pm.entry_manager.add_seed
if etype == "seed"
else pm.entry_manager.add_managed_account
else _pm.entry_manager.add_managed_account
)
index = await run_in_threadpool(
func,
index = func(
entry.get("label"),
pm.parent_seed,
_pm.parent_seed,
index=entry.get("index"),
notes=entry.get("notes", ""),
)
@@ -279,7 +213,6 @@ async def create_entry(
@app.put("/api/v1/entry/{entry_id}")
def update_entry(
request: Request,
entry_id: int,
entry: dict,
authorization: str | None = Header(None),
@@ -289,10 +222,10 @@ def update_entry(
Additional fields like ``period``, ``digits`` and ``value`` are forwarded for
specialized entry types (e.g. TOTP or key/value entries).
"""
_check_token(request, authorization)
pm = _get_pm(request)
_check_token(authorization)
assert _pm is not None
try:
pm.entry_manager.modify_entry(
_pm.entry_manager.modify_entry(
entry_id,
username=entry.get("username"),
url=entry.get("url"),
@@ -310,34 +243,31 @@ def update_entry(
@app.post("/api/v1/entry/{entry_id}/archive")
def archive_entry(
request: Request, entry_id: int, authorization: str | None = Header(None)
entry_id: int, authorization: str | None = Header(None)
) -> dict[str, str]:
"""Archive an entry."""
_check_token(request, authorization)
pm = _get_pm(request)
pm.entry_manager.archive_entry(entry_id)
_check_token(authorization)
assert _pm is not None
_pm.entry_manager.archive_entry(entry_id)
return {"status": "archived"}
@app.post("/api/v1/entry/{entry_id}/unarchive")
def unarchive_entry(
request: Request, entry_id: int, authorization: str | None = Header(None)
entry_id: int, authorization: str | None = Header(None)
) -> dict[str, str]:
"""Restore an archived entry."""
_check_token(request, authorization)
pm = _get_pm(request)
pm.entry_manager.restore_entry(entry_id)
_check_token(authorization)
assert _pm is not None
_pm.entry_manager.restore_entry(entry_id)
return {"status": "active"}
@app.get("/api/v1/config/{key}")
def get_config(
request: Request, key: str, authorization: str | None = Header(None)
) -> Any:
_check_token(request, authorization)
pm = _get_pm(request)
value = pm.config_manager.load_config(require_pin=False).get(key)
def get_config(key: str, authorization: str | None = Header(None)) -> Any:
_check_token(authorization)
assert _pm is not None
value = _pm.config_manager.load_config(require_pin=False).get(key)
if value is None:
raise HTTPException(status_code=404, detail="Not found")
return {"key": key, "value": value}
@@ -345,15 +275,12 @@ def get_config(
@app.put("/api/v1/config/{key}")
def update_config(
request: Request,
key: str,
data: dict,
authorization: str | None = Header(None),
key: str, data: dict, authorization: str | None = Header(None)
) -> dict[str, str]:
"""Update a configuration setting."""
_check_token(request, authorization)
pm = _get_pm(request)
cfg = pm.config_manager
_check_token(authorization)
assert _pm is not None
cfg = _pm.config_manager
mapping = {
"relays": lambda v: cfg.set_relays(v, require_pin=False),
"pin": cfg.set_pin,
@@ -366,7 +293,6 @@ def update_config(
}
action = mapping.get(key)
if action is None:
raise HTTPException(status_code=400, detail="Unknown key")
@@ -379,105 +305,84 @@ def update_config(
@app.post("/api/v1/secret-mode")
def set_secret_mode(
request: Request, data: dict, authorization: str | None = Header(None)
data: dict, authorization: str | None = Header(None)
) -> dict[str, str]:
"""Enable/disable secret mode and set the clipboard delay."""
_check_token(request, authorization)
pm = _get_pm(request)
_check_token(authorization)
assert _pm is not None
enabled = data.get("enabled")
delay = data.get("delay")
if enabled is None or delay is None:
raise HTTPException(status_code=400, detail="Missing fields")
cfg = pm.config_manager
cfg = _pm.config_manager
cfg.set_secret_mode_enabled(bool(enabled))
cfg.set_clipboard_clear_delay(int(delay))
pm.secret_mode_enabled = bool(enabled)
pm.clipboard_clear_delay = int(delay)
_pm.secret_mode_enabled = bool(enabled)
_pm.clipboard_clear_delay = int(delay)
return {"status": "ok"}
@app.get("/api/v1/fingerprint")
def list_fingerprints(
request: Request, authorization: str | None = Header(None)
) -> List[str]:
_check_token(request, authorization)
pm = _get_pm(request)
return pm.fingerprint_manager.list_fingerprints()
def list_fingerprints(authorization: str | None = Header(None)) -> List[str]:
_check_token(authorization)
assert _pm is not None
return _pm.fingerprint_manager.list_fingerprints()
@app.post("/api/v1/fingerprint")
def add_fingerprint(
request: Request, authorization: str | None = Header(None)
) -> dict[str, str]:
def add_fingerprint(authorization: str | None = Header(None)) -> dict[str, str]:
"""Create a new seed profile."""
_check_token(request, authorization)
pm = _get_pm(request)
pm.add_new_fingerprint()
_check_token(authorization)
assert _pm is not None
_pm.add_new_fingerprint()
return {"status": "ok"}
@app.delete("/api/v1/fingerprint/{fingerprint}")
def remove_fingerprint(
request: Request, fingerprint: str, authorization: str | None = Header(None)
fingerprint: str, authorization: str | None = Header(None)
) -> dict[str, str]:
"""Remove a seed profile."""
_check_token(request, authorization)
pm = _get_pm(request)
pm.fingerprint_manager.remove_fingerprint(fingerprint)
_check_token(authorization)
assert _pm is not None
_pm.fingerprint_manager.remove_fingerprint(fingerprint)
return {"status": "deleted"}
@app.post("/api/v1/fingerprint/select")
def select_fingerprint(
request: Request, data: dict, authorization: str | None = Header(None)
data: dict, authorization: str | None = Header(None)
) -> dict[str, str]:
"""Switch the active seed profile."""
_check_token(request, authorization)
pm = _get_pm(request)
_check_token(authorization)
assert _pm is not None
fp = data.get("fingerprint")
if not fp:
raise HTTPException(status_code=400, detail="Missing fingerprint")
pm.select_fingerprint(fp)
_pm.select_fingerprint(fp)
return {"status": "ok"}
@app.get("/api/v1/totp/export")
def export_totp(
request: Request,
authorization: str | None = Header(None),
password: str | None = Header(None, alias="X-SeedPass-Password"),
) -> dict:
def export_totp(authorization: str | None = Header(None)) -> dict:
"""Return all stored TOTP entries in JSON format."""
_check_token(request, authorization)
_require_password(request, password)
pm = _get_pm(request)
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
return pm.entry_manager.export_totp_entries(key)
_check_token(authorization)
assert _pm is not None
return _pm.entry_manager.export_totp_entries(_pm.parent_seed)
@app.get("/api/v1/totp")
def get_totp_codes(
request: Request,
authorization: str | None = Header(None),
password: str | None = Header(None, alias="X-SeedPass-Password"),
) -> dict:
def get_totp_codes(authorization: str | None = Header(None)) -> dict:
"""Return active TOTP codes with remaining seconds."""
_check_token(request, authorization)
_require_password(request, password)
pm = _get_pm(request)
entries = pm.entry_manager.list_entries(
filter_kinds=[EntryType.TOTP.value], include_archived=False
_check_token(authorization)
assert _pm is not None
entries = _pm.entry_manager.list_entries(
filter_kind=EntryType.TOTP.value, include_archived=False
)
codes = []
for idx, label, _u, _url, _arch in entries:
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
code = pm.entry_manager.get_totp_code(idx, key)
rem = pm.entry_manager.get_totp_time_remaining(idx)
code = _pm.entry_manager.get_totp_code(idx, _pm.parent_seed)
rem = _pm.entry_manager.get_totp_time_remaining(idx)
codes.append(
{"id": idx, "label": label, "code": code, "seconds_remaining": rem}
)
@@ -485,138 +390,132 @@ def get_totp_codes(
@app.get("/api/v1/stats")
def get_profile_stats(
request: Request, authorization: str | None = Header(None)
) -> dict:
def get_profile_stats(authorization: str | None = Header(None)) -> dict:
"""Return statistics about the active seed profile."""
_check_token(request, authorization)
pm = _get_pm(request)
return pm.get_profile_stats()
_check_token(authorization)
assert _pm is not None
return _pm.get_profile_stats()
@app.get("/api/v1/notifications")
def get_notifications(
request: Request, authorization: str | None = Header(None)
) -> List[dict]:
def get_notifications(authorization: str | None = Header(None)) -> List[dict]:
"""Return and clear queued notifications."""
_check_token(request, authorization)
pm = _get_pm(request)
_check_token(authorization)
assert _pm is not None
notes = []
while True:
try:
note = pm.notifications.get_nowait()
note = _pm.notifications.get_nowait()
except queue.Empty:
break
notes.append({"level": note.level, "message": note.message})
return notes
@app.get("/api/v1/parent-seed")
def get_parent_seed(
authorization: str | None = Header(None), file: str | None = None
) -> dict:
"""Return the parent seed or save it as an encrypted backup."""
_check_token(authorization)
assert _pm is not None
if file:
path = Path(file)
_pm.encryption_manager.encrypt_and_save_file(
_pm.parent_seed.encode("utf-8"), path
)
return {"status": "saved", "path": str(path)}
return {"seed": _pm.parent_seed}
@app.get("/api/v1/nostr/pubkey")
def get_nostr_pubkey(request: Request, authorization: str | None = Header(None)) -> Any:
_check_token(request, authorization)
pm = _get_pm(request)
return {"npub": pm.nostr_client.key_manager.get_npub()}
def get_nostr_pubkey(authorization: str | None = Header(None)) -> Any:
_check_token(authorization)
assert _pm is not None
return {"npub": _pm.nostr_client.key_manager.get_npub()}
@app.get("/api/v1/relays")
def list_relays(request: Request, authorization: str | None = Header(None)) -> dict:
def list_relays(authorization: str | None = Header(None)) -> dict:
"""Return the configured Nostr relays."""
_check_token(request, authorization)
pm = _get_pm(request)
cfg = pm.config_manager.load_config(require_pin=False)
_check_token(authorization)
assert _pm is not None
cfg = _pm.config_manager.load_config(require_pin=False)
return {"relays": cfg.get("relays", [])}
@app.post("/api/v1/relays")
def add_relay(
request: Request, data: dict, authorization: str | None = Header(None)
) -> dict[str, str]:
def add_relay(data: dict, authorization: str | None = Header(None)) -> dict[str, str]:
"""Add a relay URL to the configuration."""
_check_token(request, authorization)
pm = _get_pm(request)
_check_token(authorization)
assert _pm is not None
url = data.get("url")
if not url:
raise HTTPException(status_code=400, detail="Missing url")
cfg = pm.config_manager.load_config(require_pin=False)
cfg = _pm.config_manager.load_config(require_pin=False)
relays = cfg.get("relays", [])
if url in relays:
raise HTTPException(status_code=400, detail="Relay already present")
relays.append(url)
pm.config_manager.set_relays(relays, require_pin=False)
_reload_relays(request, relays)
_pm.config_manager.set_relays(relays, require_pin=False)
_reload_relays(relays)
return {"status": "ok"}
@app.delete("/api/v1/relays/{idx}")
def remove_relay(
request: Request, idx: int, authorization: str | None = Header(None)
) -> dict[str, str]:
def remove_relay(idx: int, authorization: str | None = Header(None)) -> dict[str, str]:
"""Remove a relay by its index (1-based)."""
_check_token(request, authorization)
pm = _get_pm(request)
cfg = pm.config_manager.load_config(require_pin=False)
_check_token(authorization)
assert _pm is not None
cfg = _pm.config_manager.load_config(require_pin=False)
relays = cfg.get("relays", [])
if not (1 <= idx <= len(relays)):
raise HTTPException(status_code=400, detail="Invalid index")
if len(relays) == 1:
raise HTTPException(status_code=400, detail="At least one relay required")
relays.pop(idx - 1)
pm.config_manager.set_relays(relays, require_pin=False)
_reload_relays(request, relays)
_pm.config_manager.set_relays(relays, require_pin=False)
_reload_relays(relays)
return {"status": "ok"}
@app.post("/api/v1/relays/reset")
def reset_relays(
request: Request, authorization: str | None = Header(None)
) -> dict[str, str]:
def reset_relays(authorization: str | None = Header(None)) -> dict[str, str]:
"""Reset relay list to defaults."""
_check_token(request, authorization)
pm = _get_pm(request)
_check_token(authorization)
assert _pm is not None
from nostr.client import DEFAULT_RELAYS
relays = list(DEFAULT_RELAYS)
pm.config_manager.set_relays(relays, require_pin=False)
_reload_relays(request, relays)
_pm.config_manager.set_relays(relays, require_pin=False)
_reload_relays(relays)
return {"status": "ok"}
@app.post("/api/v1/checksum/verify")
def verify_checksum(
request: Request, authorization: str | None = Header(None)
) -> dict[str, str]:
def verify_checksum(authorization: str | None = Header(None)) -> dict[str, str]:
"""Verify the SeedPass script checksum."""
_check_token(request, authorization)
pm = _get_pm(request)
pm.handle_verify_checksum()
_check_token(authorization)
assert _pm is not None
_pm.handle_verify_checksum()
return {"status": "ok"}
@app.post("/api/v1/checksum/update")
def update_checksum(
request: Request, authorization: str | None = Header(None)
) -> dict[str, str]:
def update_checksum(authorization: str | None = Header(None)) -> dict[str, str]:
"""Regenerate the script checksum file."""
_check_token(request, authorization)
pm = _get_pm(request)
pm.handle_update_script_checksum()
_check_token(authorization)
assert _pm is not None
_pm.handle_update_script_checksum()
return {"status": "ok"}
@app.post("/api/v1/vault/export")
def export_vault(
request: Request,
authorization: str | None = Header(None),
password: str | None = Header(None, alias="X-SeedPass-Password"),
):
def export_vault(authorization: str | None = Header(None)):
"""Export the vault and return the encrypted file."""
_check_token(request, authorization)
_require_password(request, password)
pm = _get_pm(request)
path = pm.handle_export_database()
_check_token(authorization)
assert _pm is not None
path = _pm.handle_export_database()
if path is None:
raise HTTPException(status_code=500, detail="Export failed")
data = Path(path).read_bytes()
@@ -628,15 +527,13 @@ async def import_vault(
request: Request, authorization: str | None = Header(None)
) -> dict[str, str]:
"""Import a vault backup from a file upload or a server path."""
_check_token(request, authorization)
pm = _get_pm(request)
_check_token(authorization)
assert _pm is not None
ctype = request.headers.get("content-type", "")
if ctype.startswith("multipart/form-data"):
form = await request.form()
file = form.get("file")
if file is None:
raise HTTPException(status_code=400, detail="Missing file")
data = await file.read()
@@ -644,75 +541,54 @@ async def import_vault(
tmp.write(data)
tmp_path = Path(tmp.name)
try:
pm.handle_import_database(tmp_path)
_pm.handle_import_database(tmp_path)
finally:
os.unlink(tmp_path)
else:
body = await request.json()
path_str = body.get("path")
if not path_str:
path = body.get("path")
if not path:
raise HTTPException(status_code=400, detail="Missing file or path")
path = _validate_encryption_path(request, Path(path_str))
if not str(path).endswith(".json.enc"):
raise HTTPException(
status_code=400,
detail="Selected file must be a '.json.enc' backup",
)
pm.handle_import_database(path)
pm.sync_vault()
_pm.handle_import_database(Path(path))
_pm.sync_vault()
return {"status": "ok"}
@app.post("/api/v1/vault/backup-parent-seed")
def backup_parent_seed(
request: Request,
data: dict,
authorization: str | None = Header(None),
password: str | None = Header(None, alias="X-SeedPass-Password"),
data: dict | None = None, authorization: str | None = Header(None)
) -> dict[str, str]:
"""Create an encrypted backup of the parent seed after confirmation."""
_check_token(request, authorization)
_require_password(request, password)
pm = _get_pm(request)
if not data.get("confirm"):
raise HTTPException(status_code=400, detail="Confirmation required")
path_str = data.get("path")
if not path_str:
raise HTTPException(status_code=400, detail="Missing path")
path = Path(path_str)
_validate_encryption_path(request, path)
pm.encryption_manager.encrypt_and_save_file(pm.parent_seed.encode("utf-8"), path)
return {"status": "saved", "path": str(path)}
"""Backup and reveal the parent seed."""
_check_token(authorization)
assert _pm is not None
path = None
if data is not None:
p = data.get("path")
if p:
path = Path(p)
_pm.handle_backup_reveal_parent_seed(path)
return {"status": "ok"}
@app.post("/api/v1/change-password")
def change_password(
request: Request, data: dict, authorization: str | None = Header(None)
data: dict, authorization: str | None = Header(None)
) -> dict[str, str]:
"""Change the master password for the active profile."""
_check_token(request, authorization)
pm = _get_pm(request)
pm.change_password(data.get("old", ""), data.get("new", ""))
_check_token(authorization)
assert _pm is not None
_pm.change_password(data.get("old", ""), data.get("new", ""))
return {"status": "ok"}
@app.post("/api/v1/password")
def generate_password(
request: Request, data: dict, authorization: str | None = Header(None)
data: dict, authorization: str | None = Header(None)
) -> dict[str, str]:
"""Generate a password using optional policy overrides."""
_check_token(request, authorization)
pm = _get_pm(request)
_check_token(authorization)
assert _pm is not None
length = int(data.get("length", 12))
policy_keys = [
"include_special_chars",
"allowed_special_chars",
@@ -724,28 +600,22 @@ def generate_password(
"min_special",
]
kwargs = {k: data.get(k) for k in policy_keys if data.get(k) is not None}
util = UtilityService(pm)
util = UtilityService(_pm)
password = util.generate_password(length, **kwargs)
return {"password": password}
@app.post("/api/v1/vault/lock")
def lock_vault(
request: Request, authorization: str | None = Header(None)
) -> dict[str, str]:
def lock_vault(authorization: str | None = Header(None)) -> dict[str, str]:
"""Lock the vault and clear sensitive data from memory."""
_check_token(request, authorization)
pm = _get_pm(request)
pm.lock_vault()
_check_token(authorization)
assert _pm is not None
_pm.lock_vault()
return {"status": "locked"}
@app.post("/api/v1/shutdown")
async def shutdown_server(
request: Request, authorization: str | None = Header(None)
) -> dict[str, str]:
_check_token(request, authorization)
async def shutdown_server(authorization: str | None = Header(None)) -> dict[str, str]:
_check_token(authorization)
asyncio.get_event_loop().call_soon(sys.exit, 0)
return {"status": "shutting down"}

src/seedpass/cli.py (new file, 878 lines)
View File

@@ -0,0 +1,878 @@
from pathlib import Path
from typing import Optional, List
import json
import typer
import sys
from seedpass.core.manager import PasswordManager
from seedpass.core.entry_types import EntryType
from seedpass.core.api import (
VaultService,
ProfileService,
SyncService,
EntryService,
ConfigService,
UtilityService,
NostrService,
ChangePasswordRequest,
UnlockRequest,
BackupParentSeedRequest,
ProfileSwitchRequest,
ProfileRemoveRequest,
)
import uvicorn
from . import api as api_module
import importlib
import importlib.util
import subprocess
app = typer.Typer(
help="SeedPass command line interface",
invoke_without_command=True,
)
# Global option shared across all commands
fingerprint_option = typer.Option(
None,
"--fingerprint",
"-f",
help="Specify which seed profile to use",
)
# Sub command groups
entry_app = typer.Typer(help="Manage individual entries")
vault_app = typer.Typer(help="Manage the entire vault")
nostr_app = typer.Typer(help="Interact with Nostr relays")
config_app = typer.Typer(help="Get or set configuration values")
fingerprint_app = typer.Typer(help="Manage seed profiles")
util_app = typer.Typer(help="Utility commands")
api_app = typer.Typer(help="Run the API server")
app.add_typer(entry_app, name="entry")
app.add_typer(vault_app, name="vault")
app.add_typer(nostr_app, name="nostr")
app.add_typer(config_app, name="config")
app.add_typer(fingerprint_app, name="fingerprint")
app.add_typer(util_app, name="util")
app.add_typer(api_app, name="api")
def _get_pm(ctx: typer.Context) -> PasswordManager:
"""Return a PasswordManager optionally selecting a fingerprint."""
fp = ctx.obj.get("fingerprint")
if fp is None:
pm = PasswordManager()
else:
pm = PasswordManager(fingerprint=fp)
return pm
def _get_services(
ctx: typer.Context,
) -> tuple[VaultService, ProfileService, SyncService]:
"""Return service layer instances for the current context."""
pm = _get_pm(ctx)
return VaultService(pm), ProfileService(pm), SyncService(pm)
def _get_entry_service(ctx: typer.Context) -> EntryService:
pm = _get_pm(ctx)
return EntryService(pm)
def _get_config_service(ctx: typer.Context) -> ConfigService:
pm = _get_pm(ctx)
return ConfigService(pm)
def _get_util_service(ctx: typer.Context) -> UtilityService:
pm = _get_pm(ctx)
return UtilityService(pm)
def _get_nostr_service(ctx: typer.Context) -> NostrService:
pm = _get_pm(ctx)
return NostrService(pm)
def _gui_backend_available() -> bool:
"""Return True if a platform-specific BeeWare backend is installed."""
for pkg in ("toga_gtk", "toga_winforms", "toga_cocoa"):
if importlib.util.find_spec(pkg) is not None:
return True
return False
@app.callback(invoke_without_command=True)
def main(ctx: typer.Context, fingerprint: Optional[str] = fingerprint_option) -> None:
"""SeedPass CLI entry point.
When called without a subcommand, this launches the interactive TUI.
"""
ctx.obj = {"fingerprint": fingerprint}
if ctx.invoked_subcommand is None:
tui = importlib.import_module("main")
raise typer.Exit(tui.main(fingerprint=fingerprint))
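# Illustrative usage (not part of the diff), assuming the console script is
# installed as `seedpass`: with no subcommand the interactive TUI starts, and the
# global --fingerprint/-f option selects a seed profile for any subcommand.
#
#     $ seedpass                      # launches the TUI
#     $ seedpass -f <fingerprint> entry list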
@entry_app.command("list")
def entry_list(
ctx: typer.Context,
sort: str = typer.Option(
"index", "--sort", help="Sort by 'index', 'label', or 'updated'"
),
kind: Optional[str] = typer.Option(None, "--kind", help="Filter by entry type"),
archived: bool = typer.Option(False, "--archived", help="Include archived"),
) -> None:
"""List entries in the vault."""
service = _get_entry_service(ctx)
entries = service.list_entries(
sort_by=sort, filter_kind=kind, include_archived=archived
)
for idx, label, username, url, is_archived in entries:
line = f"{idx}: {label}"
if username:
line += f" ({username})"
if url:
line += f" {url}"
if is_archived:
line += " [archived]"
typer.echo(line)
@entry_app.command("search")
def entry_search(
ctx: typer.Context,
query: str,
kind: List[str] = typer.Option(
None,
"--kind",
"-k",
help="Filter by entry kinds (can be repeated)",
),
) -> None:
"""Search entries."""
service = _get_entry_service(ctx)
kinds = list(kind) if kind else None
results = service.search_entries(query, kinds=kinds)
if not results:
typer.echo("No matching entries found")
return
for idx, label, username, url, _arch, etype in results:
line = f"{idx}: {etype.value.replace('_', ' ').title()} - {label}"
if username:
line += f" ({username})"
if url:
line += f" {url}"
typer.echo(line)
@entry_app.command("get")
def entry_get(ctx: typer.Context, query: str) -> None:
"""Retrieve a single entry's secret."""
service = _get_entry_service(ctx)
matches = service.search_entries(query)
if len(matches) == 0:
typer.echo("No matching entries found")
raise typer.Exit(code=1)
if len(matches) > 1:
typer.echo("Matches:")
for idx, label, username, _url, _arch, etype in matches:
name = f"{idx}: {etype.value.replace('_', ' ').title()} - {label}"
if username:
name += f" ({username})"
typer.echo(name)
raise typer.Exit(code=1)
index = matches[0][0]
entry = service.retrieve_entry(index)
etype = entry.get("type", entry.get("kind"))
if etype == EntryType.PASSWORD.value:
length = int(entry.get("length", 12))
password = service.generate_password(length, index)
typer.echo(password)
elif etype == EntryType.TOTP.value:
code = service.get_totp_code(index)
typer.echo(code)
else:
typer.echo("Unsupported entry type")
raise typer.Exit(code=1)
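# Illustrative sketch (not part of the diff): the same search -> retrieve ->
# generate flow `entry get` performs, driven directly through the service layer.
# The label is a placeholder; names mirror the imports at the top of this file.
#
#     service = EntryService(PasswordManager())
#     matches = service.search_entries("Example Site")
#     if len(matches) == 1:
#         idx = matches[0][0]
#         entry = service.retrieve_entry(idx)
#         if entry.get("type", entry.get("kind")) == EntryType.PASSWORD.value:
#             print(service.generate_password(int(entry.get("length", 12)), idx))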
@entry_app.command("add")
def entry_add(
ctx: typer.Context,
label: str,
length: int = typer.Option(12, "--length"),
username: Optional[str] = typer.Option(None, "--username"),
url: Optional[str] = typer.Option(None, "--url"),
no_special: bool = typer.Option(
False, "--no-special", help="Exclude special characters", is_flag=True
),
allowed_special_chars: Optional[str] = typer.Option(
None, "--allowed-special-chars", help="Explicit set of special characters"
),
special_mode: Optional[str] = typer.Option(
None,
"--special-mode",
help="Special character mode",
),
exclude_ambiguous: bool = typer.Option(
False,
"--exclude-ambiguous",
help="Exclude ambiguous characters",
is_flag=True,
),
min_uppercase: Optional[int] = typer.Option(None, "--min-uppercase"),
min_lowercase: Optional[int] = typer.Option(None, "--min-lowercase"),
min_digits: Optional[int] = typer.Option(None, "--min-digits"),
min_special: Optional[int] = typer.Option(None, "--min-special"),
) -> None:
"""Add a new password entry and output its index."""
service = _get_entry_service(ctx)
kwargs = {}
if no_special:
kwargs["include_special_chars"] = False
if allowed_special_chars is not None:
kwargs["allowed_special_chars"] = allowed_special_chars
if special_mode is not None:
kwargs["special_mode"] = special_mode
if exclude_ambiguous:
kwargs["exclude_ambiguous"] = True
if min_uppercase is not None:
kwargs["min_uppercase"] = min_uppercase
if min_lowercase is not None:
kwargs["min_lowercase"] = min_lowercase
if min_digits is not None:
kwargs["min_digits"] = min_digits
if min_special is not None:
kwargs["min_special"] = min_special
index = service.add_entry(label, length, username, url, **kwargs)
typer.echo(str(index))
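# Illustrative invocation (not part of the diff), assuming the console script is
# installed as `seedpass`; the label, username, URL, and printed index are
# placeholders.
#
#     $ seedpass entry add "Example Site" --length 20 --no-special \
#           --username alice --url https://example.com
#     3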
@entry_app.command("add-totp")
def entry_add_totp(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
secret: Optional[str] = typer.Option(None, "--secret", help="Import secret"),
period: int = typer.Option(30, "--period", help="TOTP period in seconds"),
digits: int = typer.Option(6, "--digits", help="Number of TOTP digits"),
) -> None:
"""Add a TOTP entry and output the otpauth URI."""
service = _get_entry_service(ctx)
uri = service.add_totp(
label,
index=index,
secret=secret,
period=period,
digits=digits,
)
typer.echo(uri)
@entry_app.command("add-ssh")
def entry_add_ssh(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add an SSH key entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_ssh_key(
label,
index=index,
notes=notes,
)
typer.echo(str(idx))
@entry_app.command("add-pgp")
def entry_add_pgp(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
key_type: str = typer.Option("ed25519", "--key-type", help="Key type"),
user_id: str = typer.Option("", "--user-id", help="User ID"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add a PGP key entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_pgp_key(
label,
index=index,
key_type=key_type,
user_id=user_id,
notes=notes,
)
typer.echo(str(idx))
@entry_app.command("add-nostr")
def entry_add_nostr(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add a Nostr key entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_nostr_key(
label,
index=index,
notes=notes,
)
typer.echo(str(idx))
@entry_app.command("add-seed")
def entry_add_seed(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
words: int = typer.Option(24, "--words", help="Word count"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add a derived seed phrase entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_seed(
label,
index=index,
words=words,
notes=notes,
)
typer.echo(str(idx))
@entry_app.command("add-key-value")
def entry_add_key_value(
ctx: typer.Context,
label: str,
key: str = typer.Option(..., "--key", help="Key name"),
value: str = typer.Option(..., "--value", help="Stored value"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add a key/value entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_key_value(label, key, value, notes=notes)
typer.echo(str(idx))
@entry_app.command("add-managed-account")
def entry_add_managed_account(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add a managed account seed entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_managed_account(
label,
index=index,
notes=notes,
)
typer.echo(str(idx))
@entry_app.command("modify")
def entry_modify(
ctx: typer.Context,
entry_id: int,
label: Optional[str] = typer.Option(None, "--label"),
username: Optional[str] = typer.Option(None, "--username"),
url: Optional[str] = typer.Option(None, "--url"),
notes: Optional[str] = typer.Option(None, "--notes"),
period: Optional[int] = typer.Option(
None, "--period", help="TOTP period in seconds"
),
digits: Optional[int] = typer.Option(None, "--digits", help="TOTP digits"),
key: Optional[str] = typer.Option(None, "--key", help="New key"),
value: Optional[str] = typer.Option(None, "--value", help="New value"),
) -> None:
"""Modify an existing entry."""
service = _get_entry_service(ctx)
try:
service.modify_entry(
entry_id,
username=username,
url=url,
notes=notes,
label=label,
period=period,
digits=digits,
key=key,
value=value,
)
except ValueError as e:
typer.echo(str(e))
sys.stdout.flush()
raise typer.Exit(code=1)
@entry_app.command("archive")
def entry_archive(ctx: typer.Context, entry_id: int) -> None:
"""Archive an entry."""
service = _get_entry_service(ctx)
service.archive_entry(entry_id)
typer.echo(str(entry_id))
@entry_app.command("unarchive")
def entry_unarchive(ctx: typer.Context, entry_id: int) -> None:
"""Restore an archived entry."""
service = _get_entry_service(ctx)
service.restore_entry(entry_id)
typer.echo(str(entry_id))
@entry_app.command("totp-codes")
def entry_totp_codes(ctx: typer.Context) -> None:
"""Display all current TOTP codes."""
service = _get_entry_service(ctx)
service.display_totp_codes()
@entry_app.command("export-totp")
def entry_export_totp(
ctx: typer.Context, file: str = typer.Option(..., help="Output file")
) -> None:
"""Export all TOTP secrets to a JSON file."""
service = _get_entry_service(ctx)
data = service.export_totp_entries()
Path(file).write_text(json.dumps(data, indent=2))
typer.echo(str(file))
@vault_app.command("export")
def vault_export(
ctx: typer.Context, file: str = typer.Option(..., help="Output file")
) -> None:
"""Export the vault profile to an encrypted file."""
vault_service, _profile, _sync = _get_services(ctx)
data = vault_service.export_profile()
Path(file).write_bytes(data)
typer.echo(str(file))
@vault_app.command("import")
def vault_import(
ctx: typer.Context, file: str = typer.Option(..., help="Input file")
) -> None:
"""Import a vault profile from an encrypted file."""
vault_service, _profile, _sync = _get_services(ctx)
data = Path(file).read_bytes()
vault_service.import_profile(data)
typer.echo(str(file))
@vault_app.command("change-password")
def vault_change_password(ctx: typer.Context) -> None:
"""Change the master password used for encryption."""
vault_service, _profile, _sync = _get_services(ctx)
old_pw = typer.prompt("Current password", hide_input=True)
new_pw = typer.prompt("New password", hide_input=True, confirmation_prompt=True)
try:
vault_service.change_password(
ChangePasswordRequest(old_password=old_pw, new_password=new_pw)
)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
typer.echo("Password updated")
@vault_app.command("unlock")
def vault_unlock(ctx: typer.Context) -> None:
"""Unlock the vault for the active profile."""
vault_service, _profile, _sync = _get_services(ctx)
password = typer.prompt("Master password", hide_input=True)
try:
resp = vault_service.unlock(UnlockRequest(password=password))
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
typer.echo(f"Unlocked in {resp.duration:.2f}s")
@vault_app.command("lock")
def vault_lock(ctx: typer.Context) -> None:
"""Lock the vault and clear sensitive data from memory."""
vault_service, _profile, _sync = _get_services(ctx)
vault_service.lock()
typer.echo("locked")
@app.command("lock")
def root_lock(ctx: typer.Context) -> None:
"""Lock the vault for the active profile."""
vault_service, _profile, _sync = _get_services(ctx)
vault_service.lock()
typer.echo("locked")
@vault_app.command("stats")
def vault_stats(ctx: typer.Context) -> None:
"""Display statistics about the current seed profile."""
vault_service, _profile, _sync = _get_services(ctx)
stats = vault_service.stats()
typer.echo(json.dumps(stats, indent=2))
@vault_app.command("reveal-parent-seed")
def vault_reveal_parent_seed(
ctx: typer.Context,
file: Optional[str] = typer.Option(
None, "--file", help="Save encrypted seed to this path"
),
) -> None:
"""Display the parent seed and optionally write an encrypted backup file."""
vault_service, _profile, _sync = _get_services(ctx)
password = typer.prompt("Master password", hide_input=True)
vault_service.backup_parent_seed(
BackupParentSeedRequest(path=Path(file) if file else None, password=password)
)
@nostr_app.command("sync")
def nostr_sync(ctx: typer.Context) -> None:
"""Sync with configured Nostr relays."""
_vault, _profile, sync_service = _get_services(ctx)
model = sync_service.sync()
if model:
typer.echo("Event IDs:")
typer.echo(f"- manifest: {model.manifest_id}")
for cid in model.chunk_ids:
typer.echo(f"- chunk: {cid}")
for did in model.delta_ids:
typer.echo(f"- delta: {did}")
else:
typer.echo("Error: Failed to sync vault")
@nostr_app.command("get-pubkey")
def nostr_get_pubkey(ctx: typer.Context) -> None:
"""Display the active profile's npub."""
service = _get_nostr_service(ctx)
npub = service.get_pubkey()
typer.echo(npub)
@nostr_app.command("list-relays")
def nostr_list_relays(ctx: typer.Context) -> None:
"""Display configured Nostr relays."""
service = _get_nostr_service(ctx)
relays = service.list_relays()
for i, r in enumerate(relays, 1):
typer.echo(f"{i}: {r}")
@nostr_app.command("add-relay")
def nostr_add_relay(ctx: typer.Context, url: str) -> None:
"""Add a relay URL."""
service = _get_nostr_service(ctx)
try:
service.add_relay(url)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
typer.echo("Added")
@nostr_app.command("remove-relay")
def nostr_remove_relay(ctx: typer.Context, idx: int) -> None:
"""Remove a relay by index (1-based)."""
service = _get_nostr_service(ctx)
try:
service.remove_relay(idx)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
typer.echo("Removed")
@config_app.command("get")
def config_get(ctx: typer.Context, key: str) -> None:
"""Get a configuration value."""
service = _get_config_service(ctx)
value = service.get(key)
if value is None:
typer.echo("Key not found")
else:
typer.echo(str(value))
@config_app.command("set")
def config_set(ctx: typer.Context, key: str, value: str) -> None:
"""Set a configuration value."""
service = _get_config_service(ctx)
try:
val = (
[r.strip() for r in value.split(",") if r.strip()]
if key == "relays"
else value
)
service.set(key, val)
except KeyError:
typer.echo("Unknown key")
raise typer.Exit(code=1)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
typer.echo("Updated")
@config_app.command("toggle-secret-mode")
def config_toggle_secret_mode(ctx: typer.Context) -> None:
"""Interactively enable or disable secret mode.
When enabled, newly generated and retrieved passwords are copied to the
clipboard instead of printed to the screen.
"""
service = _get_config_service(ctx)
try:
enabled = service.get_secret_mode_enabled()
delay = service.get_clipboard_clear_delay()
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error loading settings: {exc}")
raise typer.Exit(code=1)
typer.echo(f"Secret mode is currently {'ON' if enabled else 'OFF'}")
choice = (
typer.prompt(
"Enable secret mode? (y/n, blank to keep)", default="", show_default=False
)
.strip()
.lower()
)
if choice in ("y", "yes"):
enabled = True
elif choice in ("n", "no"):
enabled = False
inp = typer.prompt(
f"Clipboard clear delay in seconds [{delay}]", default="", show_default=False
).strip()
if inp:
try:
delay = int(inp)
if delay <= 0:
typer.echo("Delay must be positive")
raise typer.Exit(code=1)
except ValueError:
typer.echo("Invalid number")
raise typer.Exit(code=1)
try:
service.set_secret_mode(enabled, delay)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
status = "enabled" if enabled else "disabled"
typer.echo(f"Secret mode {status}.")
@config_app.command("toggle-offline")
def config_toggle_offline(ctx: typer.Context) -> None:
"""Enable or disable offline mode."""
service = _get_config_service(ctx)
try:
enabled = service.get_offline_mode()
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error loading settings: {exc}")
raise typer.Exit(code=1)
typer.echo(f"Offline mode is currently {'ON' if enabled else 'OFF'}")
choice = (
typer.prompt(
"Enable offline mode? (y/n, blank to keep)", default="", show_default=False
)
.strip()
.lower()
)
if choice in ("y", "yes"):
enabled = True
elif choice in ("n", "no"):
enabled = False
try:
service.set_offline_mode(enabled)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
status = "enabled" if enabled else "disabled"
typer.echo(f"Offline mode {status}.")
@fingerprint_app.command("list")
def fingerprint_list(ctx: typer.Context) -> None:
"""List available seed profiles."""
_vault, profile_service, _sync = _get_services(ctx)
for fp in profile_service.list_profiles():
typer.echo(fp)
@fingerprint_app.command("add")
def fingerprint_add(ctx: typer.Context) -> None:
"""Create a new seed profile."""
_vault, profile_service, _sync = _get_services(ctx)
profile_service.add_profile()
@fingerprint_app.command("remove")
def fingerprint_remove(ctx: typer.Context, fingerprint: str) -> None:
"""Remove a seed profile."""
_vault, profile_service, _sync = _get_services(ctx)
profile_service.remove_profile(ProfileRemoveRequest(fingerprint=fingerprint))
@fingerprint_app.command("switch")
def fingerprint_switch(ctx: typer.Context, fingerprint: str) -> None:
"""Switch to another seed profile."""
_vault, profile_service, _sync = _get_services(ctx)
password = typer.prompt("Master password", hide_input=True)
profile_service.switch_profile(
ProfileSwitchRequest(fingerprint=fingerprint, password=password)
)
@util_app.command("generate-password")
def generate_password(
ctx: typer.Context,
length: int = 24,
no_special: bool = typer.Option(
False, "--no-special", help="Exclude special characters", is_flag=True
),
allowed_special_chars: Optional[str] = typer.Option(
None, "--allowed-special-chars", help="Explicit set of special characters"
),
special_mode: Optional[str] = typer.Option(
None,
"--special-mode",
help="Special character mode",
),
exclude_ambiguous: bool = typer.Option(
False,
"--exclude-ambiguous",
help="Exclude ambiguous characters",
is_flag=True,
),
min_uppercase: Optional[int] = typer.Option(None, "--min-uppercase"),
min_lowercase: Optional[int] = typer.Option(None, "--min-lowercase"),
min_digits: Optional[int] = typer.Option(None, "--min-digits"),
min_special: Optional[int] = typer.Option(None, "--min-special"),
) -> None:
"""Generate a strong password."""
service = _get_util_service(ctx)
kwargs = {}
if no_special:
kwargs["include_special_chars"] = False
if allowed_special_chars is not None:
kwargs["allowed_special_chars"] = allowed_special_chars
if special_mode is not None:
kwargs["special_mode"] = special_mode
if exclude_ambiguous:
kwargs["exclude_ambiguous"] = True
if min_uppercase is not None:
kwargs["min_uppercase"] = min_uppercase
if min_lowercase is not None:
kwargs["min_lowercase"] = min_lowercase
if min_digits is not None:
kwargs["min_digits"] = min_digits
if min_special is not None:
kwargs["min_special"] = min_special
password = service.generate_password(length, **kwargs)
typer.echo(password)
@util_app.command("verify-checksum")
def verify_checksum(ctx: typer.Context) -> None:
"""Verify the SeedPass script checksum."""
service = _get_util_service(ctx)
service.verify_checksum()
@util_app.command("update-checksum")
def update_checksum(ctx: typer.Context) -> None:
"""Regenerate the script checksum file."""
service = _get_util_service(ctx)
service.update_checksum()
@api_app.command("start")
def api_start(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
"""Start the SeedPass API server."""
token = api_module.start_server(ctx.obj.get("fingerprint"))
typer.echo(f"API token: {token}")
uvicorn.run(api_module.app, host=host, port=port)
@api_app.command("stop")
def api_stop(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
"""Stop the SeedPass API server."""
import requests
try:
requests.post(
f"http://{host}:{port}/api/v1/shutdown",
headers={"Authorization": f"Bearer {api_module._token}"},
timeout=2,
)
except Exception as exc: # pragma: no cover - best effort
typer.echo(f"Failed to stop server: {exc}")
@app.command()
def gui() -> None:
"""Launch the BeeWare GUI.
If the platform-specific backend is missing, attempt to install it and
retry launching the GUI.
"""
if not _gui_backend_available():
if sys.platform.startswith("linux"):
pkg = "toga-gtk"
elif sys.platform == "win32":
pkg = "toga-winforms"
elif sys.platform == "darwin":
pkg = "toga-cocoa"
else:
typer.echo(
f"Unsupported platform '{sys.platform}' for BeeWare GUI.",
err=True,
)
raise typer.Exit(1)
typer.echo(f"Attempting to install {pkg} for GUI support...")
try:
subprocess.check_call([sys.executable, "-m", "pip", "install", pkg])
typer.echo(f"Successfully installed {pkg}.")
except subprocess.CalledProcessError as exc:
typer.echo(f"Failed to install {pkg}: {exc}", err=True)
raise typer.Exit(1)
if not _gui_backend_available():
typer.echo(
"BeeWare GUI backend still unavailable after installation attempt.",
err=True,
)
raise typer.Exit(1)
from seedpass_gui.app import main
main()
if __name__ == "__main__":
app()

View File

@@ -1,186 +0,0 @@
from __future__ import annotations
import importlib
import importlib.util
import subprocess
import sys
from typing import Optional
import typer
from .common import _get_services
from seedpass.core.errors import SeedPassError
app = typer.Typer(
help="SeedPass command line interface",
invoke_without_command=True,
)
# Global option shared across all commands
fingerprint_option = typer.Option(
None,
"--fingerprint",
"-f",
help="Specify which seed profile to use",
)
no_clipboard_option = typer.Option(
False,
"--no-clipboard",
help="Disable clipboard support and print secrets instead",
is_flag=True,
)
deterministic_totp_option = typer.Option(
False,
"--deterministic-totp",
help="Derive TOTP secrets deterministically",
is_flag=True,
)
# Sub command groups
from . import entry, vault, nostr, config, fingerprint, util, api
app.add_typer(entry.app, name="entry")
app.add_typer(vault.app, name="vault")
app.add_typer(nostr.app, name="nostr")
app.add_typer(config.app, name="config")
app.add_typer(fingerprint.app, name="fingerprint")
app.add_typer(util.app, name="util")
app.add_typer(api.app, name="api")
def run() -> None:
"""Invoke the CLI, handling SeedPass errors gracefully."""
try:
app()
except SeedPassError as exc:
typer.echo(str(exc), err=True)
raise typer.Exit(1) from exc
def _gui_backend_available() -> bool:
"""Return True if a platform-specific BeeWare backend is installed."""
for pkg in ("toga_gtk", "toga_winforms", "toga_cocoa"):
if importlib.util.find_spec(pkg) is not None:
return True
return False
@app.callback(invoke_without_command=True)
def main(
ctx: typer.Context,
fingerprint: Optional[str] = fingerprint_option,
no_clipboard: bool = no_clipboard_option,
deterministic_totp: bool = deterministic_totp_option,
) -> None:
"""SeedPass CLI entry point.
When called without a subcommand, this launches the interactive TUI.
"""
ctx.obj = {
"fingerprint": fingerprint,
"no_clipboard": no_clipboard,
"deterministic_totp": deterministic_totp,
}
if ctx.invoked_subcommand is None:
tui = importlib.import_module("main")
raise typer.Exit(tui.main(fingerprint=fingerprint))
@app.command("lock")
def root_lock(ctx: typer.Context) -> None:
"""Lock the vault for the active profile."""
vault_service, _profile, _sync = _get_services(ctx)
vault_service.lock()
typer.echo("locked")
@app.command()
def gui(
install: bool = typer.Option(
False,
"--install",
help="Attempt to install the BeeWare GUI backend if missing",
)
) -> None:
"""Launch the BeeWare GUI.
If a platform-specific backend is missing, inform the user how to
install it. Using ``--install`` will attempt installation after
confirmation.
"""
if not _gui_backend_available():
if sys.platform.startswith("linux"):
pkg = "toga-gtk"
version = "0.5.2"
sha256 = "15b346ac1a2584de5effe5e73a3888f055c68c93300aeb111db9d64186b31646"
elif sys.platform == "win32":
pkg = "toga-winforms"
version = "0.5.2"
sha256 = "83181309f204bcc4a34709d23fdfd68467ae8ecc39c906d13c661cb9a0ef581b"
elif sys.platform == "darwin":
pkg = "toga-cocoa"
version = "0.5.2"
sha256 = "a4d5d1546bf92372a6fb1b450164735fb107b2ee69d15bf87421fec3c78465f9"
else:
typer.echo(
f"Unsupported platform '{sys.platform}' for BeeWare GUI.",
err=True,
)
raise typer.Exit(1)
if not install:
typer.echo(
f"BeeWare GUI backend not found. Please install {pkg} manually or rerun "
"with '--install'.",
err=True,
)
raise typer.Exit(1)
if not typer.confirm(
f"Install {pkg}=={version} with hash verification?", default=False
):
typer.echo("Installation cancelled.", err=True)
raise typer.Exit(1)
typer.echo(
"SeedPass uses pinned versions and SHA256 hashes to verify the GUI backend "
"and protect against tampered packages."
)
try:
subprocess.check_call(
[
sys.executable,
"-m",
"pip",
"install",
"--require-hashes",
f"{pkg}=={version}",
f"--hash=sha256:{sha256}",
]
)
typer.echo(f"Successfully installed {pkg}=={version}.")
except subprocess.CalledProcessError as exc:
typer.echo(
"Secure installation failed. Please install the package manually "
f"from a trusted source. Details: {exc}",
err=True,
)
raise typer.Exit(1)
if not _gui_backend_available():
typer.echo(
"BeeWare GUI backend still unavailable after installation attempt.",
err=True,
)
raise typer.Exit(1)
from seedpass_gui.app import main
main()
if __name__ == "__main__": # pragma: no cover
run()

View File

@@ -1,38 +0,0 @@
from __future__ import annotations
import typer
import uvicorn
from .. import api as api_module
app = typer.Typer(help="Run the API server")
@app.command("start")
def api_start(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
"""Start the SeedPass API server."""
token = api_module.start_server(ctx.obj.get("fingerprint"))
typer.echo(
f"API token: {token}\nWARNING: Store this token securely; it cannot be recovered."
)
uvicorn.run(api_module.app, host=host, port=port)
@app.command("stop")
def api_stop(
token: str = typer.Option(..., help="API token"),
host: str = "127.0.0.1",
port: int = 8000,
) -> None:
"""Stop the SeedPass API server."""
import requests
try:
requests.post(
f"http://{host}:{port}/api/v1/shutdown",
headers={"Authorization": f"Bearer {token}"},
timeout=2,
)
except Exception as exc: # pragma: no cover - best effort
typer.echo(f"Failed to stop server: {exc}")

View File

@@ -1,63 +0,0 @@
from __future__ import annotations
import typer
from seedpass.core.manager import PasswordManager
from seedpass.core.entry_types import EntryType
from seedpass.core.api import (
VaultService,
ProfileService,
SyncService,
EntryService,
ConfigService,
UtilityService,
NostrService,
ChangePasswordRequest,
UnlockRequest,
BackupParentSeedRequest,
ProfileSwitchRequest,
ProfileRemoveRequest,
)
def _get_pm(ctx: typer.Context) -> PasswordManager:
"""Return a PasswordManager optionally selecting a fingerprint."""
fp = ctx.obj.get("fingerprint")
if fp is None:
pm = PasswordManager()
else:
pm = PasswordManager(fingerprint=fp)
if ctx.obj.get("no_clipboard"):
pm.secret_mode_enabled = False
if ctx.obj.get("deterministic_totp"):
pm.deterministic_totp = True
return pm
def _get_services(
ctx: typer.Context,
) -> tuple[VaultService, ProfileService, SyncService]:
"""Return service layer instances for the current context."""
pm = _get_pm(ctx)
return VaultService(pm), ProfileService(pm), SyncService(pm)
def _get_entry_service(ctx: typer.Context) -> EntryService:
pm = _get_pm(ctx)
return EntryService(pm)
def _get_config_service(ctx: typer.Context) -> ConfigService:
pm = _get_pm(ctx)
return ConfigService(pm)
def _get_util_service(ctx: typer.Context) -> UtilityService:
pm = _get_pm(ctx)
return UtilityService(pm)
def _get_nostr_service(ctx: typer.Context) -> NostrService:
pm = _get_pm(ctx)
return NostrService(pm)

View File

@@ -1,125 +0,0 @@
from __future__ import annotations
import typer
from .common import _get_config_service
app = typer.Typer(help="Get or set configuration values")
@app.command("get")
def config_get(ctx: typer.Context, key: str) -> None:
"""Get a configuration value."""
service = _get_config_service(ctx)
value = service.get(key)
if value is None:
typer.echo("Key not found")
else:
typer.echo(str(value))
@app.command("set")
def config_set(ctx: typer.Context, key: str, value: str) -> None:
"""Set a configuration value."""
service = _get_config_service(ctx)
try:
val = (
[r.strip() for r in value.split(",") if r.strip()]
if key == "relays"
else value
)
service.set(key, val)
except KeyError:
typer.echo("Unknown key")
raise typer.Exit(code=1)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
typer.echo("Updated")
@app.command("toggle-secret-mode")
def config_toggle_secret_mode(ctx: typer.Context) -> None:
"""Interactively enable or disable secret mode.
When enabled, newly generated and retrieved passwords are copied to the
clipboard instead of printed to the screen.
"""
service = _get_config_service(ctx)
try:
enabled = service.get_secret_mode_enabled()
delay = service.get_clipboard_clear_delay()
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error loading settings: {exc}")
raise typer.Exit(code=1)
typer.echo(f"Secret mode is currently {'ON' if enabled else 'OFF'}")
choice = (
typer.prompt(
"Enable secret mode? (y/n, blank to keep)", default="", show_default=False
)
.strip()
.lower()
)
if choice in ("y", "yes"):
enabled = True
elif choice in ("n", "no"):
enabled = False
inp = typer.prompt(
f"Clipboard clear delay in seconds [{delay}]", default="", show_default=False
).strip()
if inp:
try:
delay = int(inp)
if delay <= 0:
typer.echo("Delay must be positive")
raise typer.Exit(code=1)
except ValueError:
typer.echo("Invalid number")
raise typer.Exit(code=1)
try:
service.set_secret_mode(enabled, delay)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
status = "enabled" if enabled else "disabled"
typer.echo(f"Secret mode {status}.")
@app.command("toggle-offline")
def config_toggle_offline(ctx: typer.Context) -> None:
"""Enable or disable offline mode."""
service = _get_config_service(ctx)
try:
enabled = service.get_offline_mode()
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error loading settings: {exc}")
raise typer.Exit(code=1)
typer.echo(f"Offline mode is currently {'ON' if enabled else 'OFF'}")
choice = (
typer.prompt(
"Enable offline mode? (y/n, blank to keep)", default="", show_default=False
)
.strip()
.lower()
)
if choice in ("y", "yes"):
enabled = True
elif choice in ("n", "no"):
enabled = False
try:
service.set_offline_mode(enabled)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
status = "enabled" if enabled else "disabled"
typer.echo(f"Offline mode {status}.")

View File

@@ -1,368 +0,0 @@
from __future__ import annotations
import json
import sys
from pathlib import Path
from typing import List, Optional
import typer
import click
from .common import _get_entry_service, EntryType
from seedpass.core.entry_types import ALL_ENTRY_TYPES
from utils.clipboard import ClipboardUnavailableError
app = typer.Typer(help="Manage individual entries")
@app.command("list")
def entry_list(
ctx: typer.Context,
sort: str = typer.Option(
"index", "--sort", help="Sort by 'index', 'label', or 'updated'"
),
kind: Optional[str] = typer.Option(
None,
"--kind",
help="Filter by entry type",
click_type=click.Choice(ALL_ENTRY_TYPES),
),
archived: bool = typer.Option(False, "--archived", help="Include archived"),
) -> None:
"""List entries in the vault."""
service = _get_entry_service(ctx)
entries = service.list_entries(
sort_by=sort,
filter_kinds=[kind] if kind else None,
include_archived=archived,
)
for idx, label, username, url, is_archived in entries:
line = f"{idx}: {label}"
if username:
line += f" ({username})"
if url:
line += f" {url}"
if is_archived:
line += " [archived]"
typer.echo(line)
@app.command("search")
def entry_search(
ctx: typer.Context,
query: str,
kinds: List[str] = typer.Option(
None,
"--kind",
"-k",
help="Filter by entry kinds (can be repeated)",
click_type=click.Choice(ALL_ENTRY_TYPES),
),
) -> None:
"""Search entries."""
service = _get_entry_service(ctx)
kinds = list(kinds) if kinds else None
results = service.search_entries(query, kinds=kinds)
if not results:
typer.echo("No matching entries found")
return
for idx, label, username, url, _arch, etype in results:
line = f"{idx}: {etype.value.replace('_', ' ').title()} - {label}"
if username:
line += f" ({username})"
if url:
line += f" {url}"
typer.echo(line)
@app.command("get")
def entry_get(ctx: typer.Context, query: str) -> None:
"""Retrieve a single entry's secret."""
service = _get_entry_service(ctx)
try:
matches = service.search_entries(query)
if len(matches) == 0:
typer.echo("No matching entries found")
raise typer.Exit(code=1)
if len(matches) > 1:
typer.echo("Matches:")
for idx, label, username, _url, _arch, etype in matches:
name = f"{idx}: {etype.value.replace('_', ' ').title()} - {label}"
if username:
name += f" ({username})"
typer.echo(name)
raise typer.Exit(code=1)
index = matches[0][0]
entry = service.retrieve_entry(index)
etype = entry.get("type", entry.get("kind"))
if etype == EntryType.PASSWORD.value:
length = int(entry.get("length", 12))
password = service.generate_password(length, index)
typer.echo(password)
elif etype == EntryType.TOTP.value:
code = service.get_totp_code(index)
typer.echo(code)
else:
typer.echo("Unsupported entry type")
raise typer.Exit(code=1)
except ClipboardUnavailableError as exc:
typer.echo(
f"Clipboard unavailable: {exc}\n"
"Re-run with '--no-clipboard' to print secrets instead.",
err=True,
)
raise typer.Exit(code=1)
@app.command("add")
def entry_add(
ctx: typer.Context,
label: str,
length: int = typer.Option(12, "--length"),
username: Optional[str] = typer.Option(None, "--username"),
url: Optional[str] = typer.Option(None, "--url"),
no_special: bool = typer.Option(
False, "--no-special", help="Exclude special characters", is_flag=True
),
allowed_special_chars: Optional[str] = typer.Option(
None, "--allowed-special-chars", help="Explicit set of special characters"
),
special_mode: Optional[str] = typer.Option(
None,
"--special-mode",
help="Special character mode",
),
exclude_ambiguous: bool = typer.Option(
False,
"--exclude-ambiguous",
help="Exclude ambiguous characters",
is_flag=True,
),
min_uppercase: Optional[int] = typer.Option(None, "--min-uppercase"),
min_lowercase: Optional[int] = typer.Option(None, "--min-lowercase"),
min_digits: Optional[int] = typer.Option(None, "--min-digits"),
min_special: Optional[int] = typer.Option(None, "--min-special"),
) -> None:
"""Add a new password entry and output its index."""
service = _get_entry_service(ctx)
kwargs = {}
if no_special:
kwargs["include_special_chars"] = False
if allowed_special_chars is not None:
kwargs["allowed_special_chars"] = allowed_special_chars
if special_mode is not None:
kwargs["special_mode"] = special_mode
if exclude_ambiguous:
kwargs["exclude_ambiguous"] = True
if min_uppercase is not None:
kwargs["min_uppercase"] = min_uppercase
if min_lowercase is not None:
kwargs["min_lowercase"] = min_lowercase
if min_digits is not None:
kwargs["min_digits"] = min_digits
if min_special is not None:
kwargs["min_special"] = min_special
index = service.add_entry(label, length, username, url, **kwargs)
typer.echo(str(index))
@app.command("add-totp")
def entry_add_totp(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
secret: Optional[str] = typer.Option(None, "--secret", help="Import secret"),
period: int = typer.Option(30, "--period", help="TOTP period in seconds"),
digits: int = typer.Option(6, "--digits", help="Number of TOTP digits"),
deterministic_totp: bool = typer.Option(
False, "--deterministic-totp", help="Derive secret deterministically"
),
) -> None:
"""Add a TOTP entry and output the otpauth URI."""
service = _get_entry_service(ctx)
uri = service.add_totp(
label,
index=index,
secret=secret,
period=period,
digits=digits,
deterministic=deterministic_totp,
)
typer.echo(uri)
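# Illustrative invocation (not part of the diff), assuming the console script is
# installed as `seedpass`; the label and imported secret are placeholders.
#
#     $ seedpass entry add-totp "Example Site" --secret JBSWY3DPEHPK3PXP --digits 6
#     otpauth://totp/...   (URI echoed for import into an authenticator app)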
@app.command("add-ssh")
def entry_add_ssh(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add an SSH key entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_ssh_key(
label,
index=index,
notes=notes,
)
typer.echo(str(idx))
@app.command("add-pgp")
def entry_add_pgp(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
key_type: str = typer.Option("ed25519", "--key-type", help="Key type"),
user_id: str = typer.Option("", "--user-id", help="User ID"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add a PGP key entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_pgp_key(
label,
index=index,
key_type=key_type,
user_id=user_id,
notes=notes,
)
typer.echo(str(idx))
@app.command("add-nostr")
def entry_add_nostr(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add a Nostr key entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_nostr_key(
label,
index=index,
notes=notes,
)
typer.echo(str(idx))
@app.command("add-seed")
def entry_add_seed(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
words: int = typer.Option(24, "--words", help="Word count"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add a derived seed phrase entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_seed(
label,
index=index,
words=words,
notes=notes,
)
typer.echo(str(idx))
@app.command("add-key-value")
def entry_add_key_value(
ctx: typer.Context,
label: str,
key: str = typer.Option(..., "--key", help="Key name"),
value: str = typer.Option(..., "--value", help="Stored value"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add a key/value entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_key_value(label, key, value, notes=notes)
typer.echo(str(idx))
@app.command("add-managed-account")
def entry_add_managed_account(
ctx: typer.Context,
label: str,
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
"""Add a managed account seed entry and output its index."""
service = _get_entry_service(ctx)
idx = service.add_managed_account(
label,
index=index,
notes=notes,
)
typer.echo(str(idx))
@app.command("modify")
def entry_modify(
ctx: typer.Context,
entry_id: int,
label: Optional[str] = typer.Option(None, "--label"),
username: Optional[str] = typer.Option(None, "--username"),
url: Optional[str] = typer.Option(None, "--url"),
notes: Optional[str] = typer.Option(None, "--notes"),
period: Optional[int] = typer.Option(
None, "--period", help="TOTP period in seconds"
),
digits: Optional[int] = typer.Option(None, "--digits", help="TOTP digits"),
key: Optional[str] = typer.Option(None, "--key", help="New key"),
value: Optional[str] = typer.Option(None, "--value", help="New value"),
) -> None:
"""Modify an existing entry."""
service = _get_entry_service(ctx)
try:
service.modify_entry(
entry_id,
username=username,
url=url,
notes=notes,
label=label,
period=period,
digits=digits,
key=key,
value=value,
)
except ValueError as e:
typer.echo(str(e))
sys.stdout.flush()
raise typer.Exit(code=1)
@app.command("archive")
def entry_archive(ctx: typer.Context, entry_id: int) -> None:
"""Archive an entry."""
service = _get_entry_service(ctx)
service.archive_entry(entry_id)
typer.echo(str(entry_id))
@app.command("unarchive")
def entry_unarchive(ctx: typer.Context, entry_id: int) -> None:
"""Restore an archived entry."""
service = _get_entry_service(ctx)
service.restore_entry(entry_id)
typer.echo(str(entry_id))
@app.command("totp-codes")
def entry_totp_codes(ctx: typer.Context) -> None:
"""Display all current TOTP codes."""
service = _get_entry_service(ctx)
service.display_totp_codes()
@app.command("export-totp")
def entry_export_totp(
ctx: typer.Context, file: str = typer.Option(..., help="Output file")
) -> None:
"""Export all TOTP secrets to a JSON file."""
service = _get_entry_service(ctx)
data = service.export_totp_entries()
Path(file).write_text(json.dumps(data, indent=2))
typer.echo(str(file))

View File

@@ -1,40 +0,0 @@
from __future__ import annotations
import typer
from .common import _get_services, ProfileRemoveRequest, ProfileSwitchRequest
app = typer.Typer(help="Manage seed profiles")
@app.command("list")
def fingerprint_list(ctx: typer.Context) -> None:
"""List available seed profiles."""
_vault, profile_service, _sync = _get_services(ctx)
for fp in profile_service.list_profiles():
typer.echo(fp)
@app.command("add")
def fingerprint_add(ctx: typer.Context) -> None:
"""Create a new seed profile."""
_vault, profile_service, _sync = _get_services(ctx)
profile_service.add_profile()
@app.command("remove")
def fingerprint_remove(ctx: typer.Context, fingerprint: str) -> None:
"""Remove a seed profile."""
_vault, profile_service, _sync = _get_services(ctx)
profile_service.remove_profile(ProfileRemoveRequest(fingerprint=fingerprint))
@app.command("switch")
def fingerprint_switch(ctx: typer.Context, fingerprint: str) -> None:
"""Switch to another seed profile."""
_vault, profile_service, _sync = _get_services(ctx)
password = typer.prompt("Master password", hide_input=True)
profile_service.switch_profile(
ProfileSwitchRequest(fingerprint=fingerprint, password=password)
)

View File

@@ -1,67 +0,0 @@
from __future__ import annotations
import typer
from .common import _get_services, _get_nostr_service
app = typer.Typer(
help="Interact with Nostr relays. See docs/nostr_setup.md for configuration and troubleshooting."
)
@app.command("sync")
def nostr_sync(ctx: typer.Context) -> None:
"""Sync with configured Nostr relays."""
_vault, _profile, sync_service = _get_services(ctx)
model = sync_service.sync()
if model:
typer.echo("Event IDs:")
typer.echo(f"- manifest: {model.manifest_id}")
for cid in model.chunk_ids:
typer.echo(f"- chunk: {cid}")
for did in model.delta_ids:
typer.echo(f"- delta: {did}")
else:
typer.echo("Error: Failed to sync vault")
@app.command("get-pubkey")
def nostr_get_pubkey(ctx: typer.Context) -> None:
"""Display the active profile's npub."""
service = _get_nostr_service(ctx)
npub = service.get_pubkey()
typer.echo(npub)
@app.command("list-relays")
def nostr_list_relays(ctx: typer.Context) -> None:
"""Display configured Nostr relays."""
service = _get_nostr_service(ctx)
relays = service.list_relays()
for i, r in enumerate(relays, 1):
typer.echo(f"{i}: {r}")
@app.command("add-relay")
def nostr_add_relay(ctx: typer.Context, url: str) -> None:
"""Add a relay URL."""
service = _get_nostr_service(ctx)
try:
service.add_relay(url)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
typer.echo("Added")
@app.command("remove-relay")
def nostr_remove_relay(ctx: typer.Context, idx: int) -> None:
"""Remove a relay by index (1-based)."""
service = _get_nostr_service(ctx)
try:
service.remove_relay(idx)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
typer.echo("Removed")

View File

@@ -1,74 +0,0 @@
from __future__ import annotations
from typing import Optional
import typer
from .common import _get_util_service
app = typer.Typer(help="Utility commands")
@app.command("generate-password")
def generate_password(
ctx: typer.Context,
length: int = 24,
no_special: bool = typer.Option(
False, "--no-special", help="Exclude special characters", is_flag=True
),
allowed_special_chars: Optional[str] = typer.Option(
None, "--allowed-special-chars", help="Explicit set of special characters"
),
special_mode: Optional[str] = typer.Option(
None,
"--special-mode",
help="Special character mode",
),
exclude_ambiguous: bool = typer.Option(
False,
"--exclude-ambiguous",
help="Exclude ambiguous characters",
is_flag=True,
),
min_uppercase: Optional[int] = typer.Option(None, "--min-uppercase"),
min_lowercase: Optional[int] = typer.Option(None, "--min-lowercase"),
min_digits: Optional[int] = typer.Option(None, "--min-digits"),
min_special: Optional[int] = typer.Option(None, "--min-special"),
) -> None:
"""Generate a strong password."""
service = _get_util_service(ctx)
kwargs = {}
if no_special:
kwargs["include_special_chars"] = False
if allowed_special_chars is not None:
kwargs["allowed_special_chars"] = allowed_special_chars
if special_mode is not None:
kwargs["special_mode"] = special_mode
if exclude_ambiguous:
kwargs["exclude_ambiguous"] = True
if min_uppercase is not None:
kwargs["min_uppercase"] = min_uppercase
if min_lowercase is not None:
kwargs["min_lowercase"] = min_lowercase
if min_digits is not None:
kwargs["min_digits"] = min_digits
if min_special is not None:
kwargs["min_special"] = min_special
password = service.generate_password(length, **kwargs)
typer.echo(password)
@app.command("verify-checksum")
def verify_checksum(ctx: typer.Context) -> None:
"""Verify the SeedPass script checksum."""
service = _get_util_service(ctx)
service.verify_checksum()
@app.command("update-checksum")
def update_checksum(ctx: typer.Context) -> None:
"""Regenerate the script checksum file."""
service = _get_util_service(ctx)
service.update_checksum()

View File

@@ -1,99 +0,0 @@
from __future__ import annotations
import json
from pathlib import Path
from typing import Optional
import typer
from .common import (
_get_services,
ChangePasswordRequest,
UnlockRequest,
BackupParentSeedRequest,
)
app = typer.Typer(help="Manage the entire vault")
@app.command("export")
def vault_export(
ctx: typer.Context, file: str = typer.Option(..., help="Output file")
) -> None:
"""Export the vault profile to an encrypted file."""
vault_service, _profile, _sync = _get_services(ctx)
data = vault_service.export_profile()
Path(file).write_bytes(data)
typer.echo(str(file))
@app.command("import")
def vault_import(
ctx: typer.Context, file: str = typer.Option(..., help="Input file")
) -> None:
"""Import a vault profile from an encrypted file."""
vault_service, _profile, _sync = _get_services(ctx)
data = Path(file).read_bytes()
vault_service.import_profile(data)
typer.echo(str(file))
@app.command("change-password")
def vault_change_password(ctx: typer.Context) -> None:
"""Change the master password used for encryption."""
vault_service, _profile, _sync = _get_services(ctx)
old_pw = typer.prompt("Current password", hide_input=True)
new_pw = typer.prompt("New password", hide_input=True, confirmation_prompt=True)
try:
vault_service.change_password(
ChangePasswordRequest(old_password=old_pw, new_password=new_pw)
)
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
typer.echo("Password updated")
@app.command("unlock")
def vault_unlock(ctx: typer.Context) -> None:
"""Unlock the vault for the active profile."""
vault_service, _profile, _sync = _get_services(ctx)
password = typer.prompt("Master password", hide_input=True)
try:
resp = vault_service.unlock(UnlockRequest(password=password))
except Exception as exc: # pragma: no cover - pass through errors
typer.echo(f"Error: {exc}")
raise typer.Exit(code=1)
typer.echo(f"Unlocked in {resp.duration:.2f}s")
@app.command("lock")
def vault_lock(ctx: typer.Context) -> None:
"""Lock the vault and clear sensitive data from memory."""
vault_service, _profile, _sync = _get_services(ctx)
vault_service.lock()
typer.echo("locked")
@app.command("stats")
def vault_stats(ctx: typer.Context) -> None:
"""Display statistics about the current seed profile."""
vault_service, _profile, _sync = _get_services(ctx)
stats = vault_service.stats()
typer.echo(json.dumps(stats, indent=2))
@app.command("reveal-parent-seed")
def vault_reveal_parent_seed(
ctx: typer.Context,
file: Optional[str] = typer.Option(
None, "--file", help="Save encrypted seed to this path"
),
) -> None:
"""Display the parent seed and optionally write an encrypted backup file."""
vault_service, _profile, _sync = _get_services(ctx)
password = typer.prompt("Master password", hide_input=True)
vault_service.backup_parent_seed(
BackupParentSeedRequest(path=Path(file) if file else None, password=password)
)

View File

@@ -148,9 +148,7 @@ class VaultService:
"""Restore a profile from ``data`` and sync."""
with self._lock:
decrypted = self._manager.vault.encryption_manager.decrypt_data(
data, context="profile"
)
decrypted = self._manager.vault.encryption_manager.decrypt_data(data)
index = json.loads(decrypted.decode("utf-8"))
self._manager.vault.save_index(index)
self._manager.sync_vault()
@@ -265,13 +263,13 @@ class EntryService:
def list_entries(
self,
sort_by: str = "index",
filter_kinds: list[str] | None = None,
filter_kind: str | None = None,
include_archived: bool = False,
):
with self._lock:
return self._manager.entry_manager.list_entries(
sort_by=sort_by,
filter_kinds=filter_kinds,
filter_kind=filter_kind,
include_archived=include_archived,
)
@@ -305,10 +303,9 @@ class EntryService:
def get_totp_code(self, entry_id: int) -> str:
with self._lock:
key = getattr(self._manager, "KEY_TOTP_DET", None) or getattr(
self._manager, "parent_seed", None
return self._manager.entry_manager.get_totp_code(
entry_id, self._manager.parent_seed
)
return self._manager.entry_manager.get_totp_code(entry_id, key)
def add_entry(
self,
@@ -363,18 +360,15 @@ class EntryService:
secret: str | None = None,
period: int = 30,
digits: int = 6,
deterministic: bool = False,
) -> str:
with self._lock:
key = self._manager.KEY_TOTP_DET if deterministic else None
uri = self._manager.entry_manager.add_totp(
label,
key,
self._manager.parent_seed,
index=index,
secret=secret,
period=period,
digits=digits,
deterministic=deterministic,
)
self._manager.start_background_vault_sync()
return uri
@@ -519,10 +513,9 @@ class EntryService:
def export_totp_entries(self) -> dict:
with self._lock:
key = getattr(self._manager, "KEY_TOTP_DET", None) or getattr(
self._manager, "parent_seed", None
return self._manager.entry_manager.export_totp_entries(
self._manager.parent_seed
)
return self._manager.entry_manager.export_totp_entries(key)
def display_totp_codes(self) -> None:
with self._lock:

View File

@@ -15,6 +15,7 @@ import logging
import os
import shutil
import time
import traceback
from pathlib import Path
from termcolor import colored
@@ -145,28 +146,6 @@ class BackupManager:
)
)
def restore_from_backup(self, backup_path: str) -> None:
"""Restore the index file from a user-specified backup path."""
try:
src = Path(backup_path)
if not src.exists():
logger.error(f"Backup file '{src}' does not exist.")
print(colored(f"Error: Backup file '{src}' does not exist.", "red"))
return
shutil.copy2(src, self.index_file)
os.chmod(self.index_file, 0o600)
logger.info(f"Index file restored from backup '{src}'.")
print(colored(f"[+] Index file restored from backup '{src}'.", "green"))
except Exception as e:
logger.error(
f"Failed to restore from backup '{backup_path}': {e}", exc_info=True
)
print(
colored(
f"Error: Failed to restore from backup '{backup_path}': {e}", "red"
)
)
def list_backups(self) -> None:
try:
backup_files = sorted(

View File

@@ -41,18 +41,17 @@ class ConfigManager:
logger.info("Config file not found; returning defaults")
return {
"relays": list(DEFAULT_NOSTR_RELAYS),
"offline_mode": True,
"offline_mode": False,
"pin_hash": "",
"password_hash": "",
"inactivity_timeout": INACTIVITY_TIMEOUT,
"kdf_iterations": 50_000,
"kdf_mode": "pbkdf2",
"argon2_time_cost": 2,
"additional_backup_path": "",
"backup_interval": 0,
"secret_mode_enabled": False,
"clipboard_clear_delay": 45,
"quick_unlock_enabled": False,
"quick_unlock": False,
"nostr_max_retries": MAX_RETRIES,
"nostr_retry_delay": float(RETRY_DELAY),
"min_uppercase": 2,
@@ -71,18 +70,17 @@ class ConfigManager:
raise ValueError("Config data must be a dictionary")
# Ensure defaults for missing keys
data.setdefault("relays", list(DEFAULT_NOSTR_RELAYS))
data.setdefault("offline_mode", True)
data.setdefault("offline_mode", False)
data.setdefault("pin_hash", "")
data.setdefault("password_hash", "")
data.setdefault("inactivity_timeout", INACTIVITY_TIMEOUT)
data.setdefault("kdf_iterations", 50_000)
data.setdefault("kdf_mode", "pbkdf2")
data.setdefault("argon2_time_cost", 2)
data.setdefault("additional_backup_path", "")
data.setdefault("backup_interval", 0)
data.setdefault("secret_mode_enabled", False)
data.setdefault("clipboard_clear_delay", 45)
data.setdefault("quick_unlock_enabled", data.get("quick_unlock", False))
data.setdefault("quick_unlock", False)
data.setdefault("nostr_max_retries", MAX_RETRIES)
data.setdefault("nostr_retry_delay", float(RETRY_DELAY))
data.setdefault("min_uppercase", 2)
@@ -198,19 +196,6 @@ class ConfigManager:
config = self.load_config(require_pin=False)
return config.get("kdf_mode", "pbkdf2")
def set_argon2_time_cost(self, time_cost: int) -> None:
"""Persist the Argon2 ``time_cost`` parameter."""
if time_cost <= 0:
raise ValueError("time_cost must be positive")
config = self.load_config(require_pin=False)
config["argon2_time_cost"] = int(time_cost)
self.save_config(config)
def get_argon2_time_cost(self) -> int:
"""Retrieve the Argon2 ``time_cost`` setting."""
config = self.load_config(require_pin=False)
return int(config.get("argon2_time_cost", 2))
def set_additional_backup_path(self, path: Optional[str]) -> None:
"""Persist an optional additional backup path in the config."""
config = self.load_config(require_pin=False)
@@ -243,7 +228,7 @@ class ConfigManager:
def get_offline_mode(self) -> bool:
"""Retrieve the offline mode setting."""
config = self.load_config(require_pin=False)
return bool(config.get("offline_mode", True))
return bool(config.get("offline_mode", False))
def set_clipboard_clear_delay(self, delay: int) -> None:
"""Persist clipboard clear timeout in seconds."""
@@ -335,13 +320,13 @@ class ConfigManager:
def set_quick_unlock(self, enabled: bool) -> None:
"""Persist the quick unlock toggle."""
cfg = self.load_config(require_pin=False)
cfg["quick_unlock_enabled"] = bool(enabled)
cfg["quick_unlock"] = bool(enabled)
self.save_config(cfg)
def get_quick_unlock(self) -> bool:
"""Retrieve whether quick unlock is enabled."""
cfg = self.load_config(require_pin=False)
return bool(cfg.get("quick_unlock_enabled", False))
return bool(cfg.get("quick_unlock", False))
def set_nostr_max_retries(self, retries: int) -> None:
"""Persist the maximum number of Nostr retry attempts."""

View File

@@ -1,7 +1,7 @@
# /src/seedpass.core/encryption.py
import logging
import unicodedata
import traceback
try:
import orjson as json_lib # type: ignore
@@ -16,43 +16,19 @@ except Exception: # pragma: no cover - fallback for environments without orjson
import hashlib
import os
import base64
import zlib
from dataclasses import asdict
from pathlib import Path
from typing import Optional, Tuple
from typing import Optional
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
from cryptography.exceptions import InvalidTag
from cryptography.fernet import Fernet, InvalidToken
from termcolor import colored
from utils.file_lock import exclusive_lock
from mnemonic import Mnemonic
from utils.password_prompt import prompt_existing_password
from utils.key_derivation import KdfConfig, CURRENT_KDF_VERSION
from .errors import DecryptionError
# Instantiate the logger
logger = logging.getLogger(__name__)
def _derive_legacy_key_from_password(password: str, iterations: int = 100_000) -> bytes:
"""Derive legacy Fernet key using password only (no fingerprint)."""
normalized = unicodedata.normalize("NFKD", password).strip().encode("utf-8")
key = hashlib.pbkdf2_hmac("sha256", normalized, b"", iterations, dklen=32)
return base64.urlsafe_b64encode(key)
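# A minimal standalone sketch of how the password-only legacy key above can be
# used: the derived value is a urlsafe-base64 encoded 32-byte key, which is what
# cryptography's Fernet expects. The sample password and iteration count are
# illustrative only.
import base64
import hashlib
import unicodedata
from cryptography.fernet import Fernet

def _legacy_fernet(password: str, iterations: int = 100_000) -> Fernet:
    normalized = unicodedata.normalize("NFKD", password).strip().encode("utf-8")
    key = hashlib.pbkdf2_hmac("sha256", normalized, b"", iterations, dklen=32)
    return Fernet(base64.urlsafe_b64encode(key))

# token = _legacy_fernet("example passphrase").encrypt(b"hello")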
class LegacyFormatRequiresMigrationError(Exception):
"""Raised when legacy-encrypted data needs user-guided migration."""
def __init__(self, context: Optional[str] = None) -> None:
msg = (
f"Legacy data detected for {context}" if context else "Legacy data detected"
)
super().__init__(msg)
self.context = context
class EncryptionManager:
"""
Manages encryption and decryption, handling migration from legacy Fernet
@@ -90,94 +66,46 @@ class EncryptionManager:
)
raise
# Track user preference for handling legacy indexes
self._legacy_migrate_flag = True
self.last_migration_performed = False
# Track nonces to detect accidental reuse
self.nonce_crc_table: set[int] = set()
def encrypt_data(self, data: bytes) -> bytes:
"""
Encrypt data using AES-GCM, emitting ``b"V3|" + nonce + ciphertext + tag``.
A fresh 96-bit nonce is generated for each call and tracked via a CRC
table to detect accidental reuse during batch operations.
(2) Encrypts data using the NEW AES-GCM format, prepending a version
header and the nonce. All new data will be in this format.
"""
try:
nonce = os.urandom(12) # 96-bit nonce is recommended for AES-GCM
crc = zlib.crc32(nonce)
if crc in self.nonce_crc_table:
raise ValueError("Nonce reuse detected")
self.nonce_crc_table.add(crc)
ciphertext = self.cipher.encrypt(nonce, data, None)
return b"V3|" + nonce + ciphertext
return b"V2:" + nonce + ciphertext
except Exception as e:
logger.error(f"Failed to encrypt data: {e}", exc_info=True)
raise
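# A self-contained sketch (random throwaway key, the b"V3|" framing described in
# the docstring above) of the AES-GCM roundtrip plus the CRC-based nonce-reuse
# check; it is an illustration, not the project's own API.
import os
import zlib
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

_key = AESGCM.generate_key(bit_length=256)
_aead = AESGCM(_key)
_seen_nonces: set[int] = set()

def _seal(data: bytes) -> bytes:
    nonce = os.urandom(12)                # 96-bit nonce recommended for AES-GCM
    crc = zlib.crc32(nonce)
    if crc in _seen_nonces:
        raise ValueError("Nonce reuse detected")
    _seen_nonces.add(crc)
    return b"V3|" + nonce + _aead.encrypt(nonce, data, None)

def _open(blob: bytes) -> bytes:
    return _aead.decrypt(blob[3:15], blob[15:], None)

# assert _open(_seal(b"secret")) == b"secret"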
def decrypt_data(
self, encrypted_data: bytes, context: Optional[str] = None
) -> bytes:
"""Decrypt ``encrypted_data`` handling legacy fallbacks.
Parameters
----------
encrypted_data:
The bytes to decrypt.
context:
Optional string describing what is being decrypted ("seed", "index", etc.)
for clearer error messages.
def decrypt_data(self, encrypted_data: bytes) -> bytes:
"""
ctx = f" {context}" if context else ""
try:
# Try the new V3 format first
if encrypted_data.startswith(b"V3|"):
(3) The core migration logic. Tries the new format first, then falls back
to the old one. This is the ONLY place decryption logic should live.
"""
# Try the new V2 format first
if encrypted_data.startswith(b"V2:"):
try:
nonce = encrypted_data[3:15]
ciphertext = encrypted_data[15:]
if len(ciphertext) < 16:
logger.error("AES-GCM payload too short")
raise InvalidToken("AES-GCM payload too short")
return self.cipher.decrypt(nonce, ciphertext, None)
except InvalidTag as e:
logger.error("AES-GCM decryption failed: Invalid authentication tag.")
try:
nonce = encrypted_data[3:15]
ciphertext = encrypted_data[15:]
if len(ciphertext) < 16:
logger.error("AES-GCM payload too short")
raise DecryptionError(
f"Failed to decrypt{ctx}: AES-GCM payload too short"
)
return self.cipher.decrypt(nonce, ciphertext, None)
except InvalidTag as e:
logger.error(f"Failed to decrypt{ctx}: invalid key or corrupt file")
raise DecryptionError(
f"Failed to decrypt{ctx}: invalid key or corrupt file"
) from e
# Next try the older V2 format
if encrypted_data.startswith(b"V2:"):
try:
nonce = encrypted_data[3:15]
ciphertext = encrypted_data[15:]
if len(ciphertext) < 16:
logger.error("AES-GCM payload too short")
raise DecryptionError(
f"Failed to decrypt{ctx}: AES-GCM payload too short"
)
return self.cipher.decrypt(nonce, ciphertext, None)
except InvalidTag as e:
logger.debug(
"AES-GCM decryption failed: Invalid authentication tag."
result = self.fernet.decrypt(encrypted_data[3:])
logger.warning(
"Legacy-format file had incorrect 'V2:' header; decrypted with Fernet"
)
try:
result = self.fernet.decrypt(encrypted_data[3:])
logger.warning(
"Legacy-format file had incorrect 'V2:' header; decrypted with Fernet"
)
return result
except InvalidToken:
logger.error(
f"Failed to decrypt{ctx}: invalid key or corrupt file"
)
raise DecryptionError(
f"Failed to decrypt{ctx}: invalid key or corrupt file"
) from e
return result
except InvalidToken:
raise InvalidToken("AES-GCM decryption failed.") from e
# If it's neither V3 nor V2, assume legacy Fernet format
# If it's not V2, it must be the legacy Fernet format
else:
logger.warning("Data is in legacy Fernet format. Attempting migration.")
try:
return self.fernet.decrypt(encrypted_data)
@@ -185,262 +113,109 @@ class EncryptionManager:
logger.error(
"Legacy Fernet decryption failed. Vault may be corrupt or key is incorrect."
)
raise DecryptionError(
f"Failed to decrypt{ctx}: invalid key or corrupt file"
raise InvalidToken(
"Could not decrypt data with any available method."
) from e
except DecryptionError as e:
if (
encrypted_data.startswith(b"V3|")
or encrypted_data.startswith(b"V2:")
or not self._legacy_migrate_flag
):
raise
logger.debug(f"Could not decrypt data{ctx}: {e}")
raise LegacyFormatRequiresMigrationError(context) from e
except (InvalidToken, InvalidTag) as e: # pragma: no cover - safety net
raise DecryptionError(
f"Failed to decrypt{ctx}: invalid key or corrupt file"
) from e
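# Condensed sketch of the prefix-dispatch fallback implemented above: try the
# newest header first, then the older one, then assume legacy Fernet. The
# cipher objects are passed in, and error handling is omitted for brevity.
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

def _decrypt_any(blob: bytes, aead: AESGCM, fernet: Fernet) -> bytes:
    if blob.startswith(b"V3|") or blob.startswith(b"V2:"):
        return aead.decrypt(blob[3:15], blob[15:], None)   # both headers are 3 bytes
    return fernet.decrypt(blob)                            # legacy path, re-encrypted later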
def decrypt_legacy(
self, encrypted_data: bytes, password: str, context: Optional[str] = None
) -> bytes:
"""Decrypt ``encrypted_data`` using legacy password-only key derivation."""
ctx = f" {context}" if context else ""
last_exc: Optional[Exception] = None
for iter_count in [50_000, 100_000]:
try:
legacy_key = _derive_legacy_key_from_password(
password, iterations=iter_count
)
legacy_mgr = EncryptionManager(legacy_key, self.fingerprint_dir)
legacy_mgr._legacy_migrate_flag = False
result = legacy_mgr.decrypt_data(encrypted_data, context=context)
try: # record iteration count for future runs
from .vault import Vault
from .config_manager import ConfigManager
cfg_mgr = ConfigManager(
Vault(self, self.fingerprint_dir), self.fingerprint_dir
)
cfg_mgr.set_kdf_iterations(iter_count)
except Exception: # pragma: no cover - best effort
logger.error(
"Failed to record PBKDF2 iteration count in config",
exc_info=True,
)
logger.warning(
"Data decrypted using legacy password-only key derivation."
)
return result
except Exception as e2: # pragma: no cover - try next iteration
last_exc = e2
logger.error(f"Failed legacy decryption attempt: {last_exc}", exc_info=True)
raise DecryptionError(
f"Failed to decrypt{ctx}: invalid key or corrupt file"
) from last_exc
# --- All functions below this point now use the smart `decrypt_data` method ---
def resolve_relative_path(self, relative_path: Path) -> Path:
"""Resolve ``relative_path`` within ``fingerprint_dir`` and validate it.
Parameters
----------
relative_path:
The user-supplied path relative to ``fingerprint_dir``.
Returns
-------
Path
The normalized absolute path inside ``fingerprint_dir``.
Raises
------
ValueError
If the resulting path is absolute or escapes ``fingerprint_dir``.
"""
candidate = (self.fingerprint_dir / relative_path).resolve()
if not candidate.is_relative_to(self.fingerprint_dir.resolve()):
raise ValueError("Invalid path outside fingerprint directory")
return candidate
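# Standalone sketch of the same containment check; the paths are illustrative.
from pathlib import Path

def _confine(base: Path, relative: Path) -> Path:
    candidate = (base / relative).resolve()
    if not candidate.is_relative_to(base.resolve()):      # Python 3.9+
        raise ValueError("Invalid path outside base directory")
    return candidate

# _confine(Path("/tmp/profile"), Path("../etc/passwd"))  -> raises ValueError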
def encrypt_parent_seed(
self, parent_seed: str, kdf: Optional[KdfConfig] = None
) -> None:
def encrypt_parent_seed(self, parent_seed: str) -> None:
"""Encrypts and saves the parent seed to 'parent_seed.enc'."""
data = parent_seed.encode("utf-8")
self.encrypt_and_save_file(data, self.parent_seed_file, kdf=kdf)
encrypted_data = self.encrypt_data(data) # This now creates V2 format
with exclusive_lock(self.parent_seed_file) as fh:
fh.seek(0)
fh.truncate()
fh.write(encrypted_data)
os.chmod(self.parent_seed_file, 0o600)
logger.info(f"Parent seed encrypted and saved to '{self.parent_seed_file}'.")
def decrypt_parent_seed(self) -> str:
"""Decrypts and returns the parent seed, handling migration."""
with exclusive_lock(self.parent_seed_file) as fh:
fh.seek(0)
blob = fh.read()
encrypted_data = fh.read()
kdf, encrypted_data = self._deserialize(blob)
is_legacy = not (
encrypted_data.startswith(b"V3|") or encrypted_data.startswith(b"V2:")
)
decrypted_data = self.decrypt_data(encrypted_data, context="seed")
is_legacy = not encrypted_data.startswith(b"V2:")
decrypted_data = self.decrypt_data(encrypted_data)
if is_legacy:
logger.info("Parent seed was in legacy format. Re-encrypting to V3 format.")
self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip(), kdf=kdf)
logger.info("Parent seed was in legacy format. Re-encrypting to V2 format.")
self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip())
return decrypted_data.decode("utf-8").strip()
def _serialize(self, kdf: KdfConfig, ciphertext: bytes) -> bytes:
payload = {"kdf": asdict(kdf), "ct": base64.b64encode(ciphertext).decode()}
if USE_ORJSON:
return json_lib.dumps(payload)
return json_lib.dumps(payload, separators=(",", ":")).encode("utf-8")
def _deserialize(self, blob: bytes) -> Tuple[KdfConfig, bytes]:
"""Return ``(KdfConfig, ciphertext)`` from serialized *blob*.
Legacy files stored the raw ciphertext without a JSON wrapper. If
decoding the wrapper fails, treat ``blob`` as the ciphertext and return
a default HKDF configuration.
"""
try:
if USE_ORJSON:
obj = json_lib.loads(blob)
else:
obj = json_lib.loads(blob.decode("utf-8"))
kdf = KdfConfig(**obj.get("kdf", {}))
ct_b64 = obj.get("ct", "")
ciphertext = base64.b64decode(ct_b64)
if ciphertext:
return kdf, ciphertext
except Exception: # pragma: no cover - fall back to legacy path
pass
# Legacy format: ``blob`` already contains the ciphertext
return (
KdfConfig(name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""),
blob,
)
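# A minimal illustration of the wrapper format handled above, using only the
# standard-library json module (the real code prefers orjson when available):
# a JSON object carrying the KDF parameters plus base64 ciphertext, with raw
# ciphertext accepted as the legacy fallback.
import base64
import json

def _wrap(kdf: dict, ciphertext: bytes) -> bytes:
    payload = {"kdf": kdf, "ct": base64.b64encode(ciphertext).decode()}
    return json.dumps(payload, separators=(",", ":")).encode("utf-8")

def _unwrap(blob: bytes) -> tuple[dict, bytes]:
    try:
        obj = json.loads(blob.decode("utf-8"))
        ct = base64.b64decode(obj.get("ct", ""))
        if ct:
            return obj.get("kdf", {}), ct
    except (ValueError, UnicodeDecodeError, AttributeError):
        pass
    return {}, blob                      # legacy: blob is already the ciphertext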
def encrypt_and_save_file(
self, data: bytes, relative_path: Path, *, kdf: Optional[KdfConfig] = None
) -> None:
if kdf is None:
kdf = KdfConfig()
file_path = self.resolve_relative_path(relative_path)
def encrypt_and_save_file(self, data: bytes, relative_path: Path) -> None:
file_path = self.fingerprint_dir / relative_path
file_path.parent.mkdir(parents=True, exist_ok=True)
encrypted_data = self.encrypt_data(data)
payload = self._serialize(kdf, encrypted_data)
with exclusive_lock(file_path) as fh:
fh.seek(0)
fh.truncate()
fh.write(payload)
fh.write(encrypted_data)
fh.flush()
os.fsync(fh.fileno())
os.chmod(file_path, 0o600)
def decrypt_file(self, relative_path: Path) -> bytes:
file_path = self.resolve_relative_path(relative_path)
file_path = self.fingerprint_dir / relative_path
with exclusive_lock(file_path) as fh:
fh.seek(0)
blob = fh.read()
_, encrypted_data = self._deserialize(blob)
return self.decrypt_data(encrypted_data, context=str(relative_path))
encrypted_data = fh.read()
return self.decrypt_data(encrypted_data)
def get_file_kdf(self, relative_path: Path) -> KdfConfig:
file_path = self.resolve_relative_path(relative_path)
with exclusive_lock(file_path) as fh:
fh.seek(0)
blob = fh.read()
kdf, _ = self._deserialize(blob)
return kdf
def save_json_data(
self,
data: dict,
relative_path: Optional[Path] = None,
*,
kdf: Optional[KdfConfig] = None,
) -> None:
def save_json_data(self, data: dict, relative_path: Optional[Path] = None) -> None:
if relative_path is None:
relative_path = Path("seedpass_entries_db.json.enc")
if USE_ORJSON:
json_data = json_lib.dumps(data)
else:
json_data = json_lib.dumps(data, separators=(",", ":")).encode("utf-8")
self.encrypt_and_save_file(json_data, relative_path, kdf=kdf)
self.encrypt_and_save_file(json_data, relative_path)
logger.debug(f"JSON data encrypted and saved to '{relative_path}'.")
def load_json_data(
self, relative_path: Optional[Path] = None, *, return_kdf: bool = False
) -> dict | Tuple[dict, KdfConfig]:
def load_json_data(self, relative_path: Optional[Path] = None) -> dict:
"""
Loads and decrypts JSON data, automatically migrating and re-saving
if it's in the legacy format.
"""
if relative_path is None:
relative_path = Path("seedpass_entries_db.json.enc")
file_path = self.resolve_relative_path(relative_path)
file_path = self.fingerprint_dir / relative_path
if not file_path.exists():
empty: dict = {"entries": {}}
if return_kdf:
return empty, KdfConfig(
name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""
)
return empty
return {"entries": {}}
with exclusive_lock(file_path) as fh:
fh.seek(0)
blob = fh.read()
encrypted_data = fh.read()
kdf, encrypted_data = self._deserialize(blob)
is_legacy = not (
encrypted_data.startswith(b"V3|") or encrypted_data.startswith(b"V2:")
)
self.last_migration_performed = False
is_legacy = not encrypted_data.startswith(b"V2:")
try:
decrypted_data = self.decrypt_data(
encrypted_data, context=str(relative_path)
)
decrypted_data = self.decrypt_data(encrypted_data)
if USE_ORJSON:
data = json_lib.loads(decrypted_data)
else:
data = json_lib.loads(decrypted_data.decode("utf-8"))
# If it was a legacy file, re-save it in the new format now
if is_legacy and self._legacy_migrate_flag:
if is_legacy:
logger.info(f"Migrating and re-saving legacy vault file: {file_path}")
self.save_json_data(data, relative_path, kdf=kdf)
self.save_json_data(data, relative_path)
self.update_checksum(relative_path)
self.last_migration_performed = True
if return_kdf:
return data, kdf
return data
except DecryptionError as e:
msg = f"Failed to decrypt or parse data from {file_path}: {e}"
logger.error(msg)
raise
except (InvalidToken, InvalidTag) as e: # pragma: no cover - legacy safety
msg = f"Failed to decrypt or parse data from {file_path}: {e}"
logger.error(msg)
raise DecryptionError(
f"Failed to decrypt {file_path}: invalid key or corrupt file"
) from e
except JSONDecodeError as e:
msg = f"Failed to parse JSON data from {file_path}: {e}"
logger.error(msg)
except (InvalidToken, InvalidTag, JSONDecodeError) as e:
logger.error(
f"FATAL: Could not decrypt or parse data from {file_path}: {e}",
exc_info=True,
)
raise
def get_encrypted_index(self) -> Optional[bytes]:
relative_path = Path("seedpass_entries_db.json.enc")
file_path = self.resolve_relative_path(relative_path)
file_path = self.fingerprint_dir / relative_path
if not file_path.exists():
return None
with exclusive_lock(file_path) as fh:
@@ -469,18 +244,13 @@ class EncryptionManager:
"""
if relative_path is None:
relative_path = Path("seedpass_entries_db.json.enc")
kdf, ciphertext = self._deserialize(encrypted_data)
is_legacy = not (ciphertext.startswith(b"V3|") or ciphertext.startswith(b"V2:"))
self.last_migration_performed = False
def _process(decrypted: bytes) -> dict:
try:
decrypted_data = self.decrypt_data(encrypted_data)
if USE_ORJSON:
data = json_lib.loads(decrypted)
data = json_lib.loads(decrypted_data)
else:
data = json_lib.loads(decrypted.decode("utf-8"))
existing_file = self.resolve_relative_path(relative_path)
if merge and existing_file.exists():
data = json_lib.loads(decrypted_data.decode("utf-8"))
if merge and (self.fingerprint_dir / relative_path).exists():
current = self.load_json_data(relative_path)
current_entries = current.get("entries", {})
for idx, entry in data.get("entries", {}).items():
@@ -494,53 +264,11 @@ class EncryptionManager:
current.get("schema_version", 0), data.get("schema_version", 0)
)
data = current
return data
try:
decrypted_data = self.decrypt_data(ciphertext, context=str(relative_path))
data = _process(decrypted_data)
self.save_json_data(data, relative_path, kdf=kdf)
self.save_json_data(data, relative_path) # This always saves in V2 format
self.update_checksum(relative_path)
logger.info("Index file from Nostr was processed and saved successfully.")
self.last_migration_performed = is_legacy
print(colored("Index file updated from Nostr successfully.", "green"))
return True
except (DecryptionError, LegacyFormatRequiresMigrationError):
try:
password = prompt_existing_password(
"Enter your master password for legacy decryption: "
)
decrypted_data = self.decrypt_legacy(
ciphertext, password, context=str(relative_path)
)
data = _process(decrypted_data)
self.save_json_data(data, relative_path, kdf=kdf)
self.update_checksum(relative_path)
logger.warning(
"Index decrypted using legacy password-only key derivation."
)
print(
colored(
"Warning: index decrypted with legacy key; it will be re-encrypted.",
"yellow",
)
)
self.last_migration_performed = True
return True
except Exception as e2:
if strict:
logger.error(
f"Failed legacy decryption attempt: {e2}",
exc_info=True,
)
print(
colored(
f"Error: Failed to decrypt and save data from Nostr: {e2}",
"red",
)
)
raise
logger.warning(f"Failed to decrypt index from Nostr: {e2}")
return False
except Exception as e: # pragma: no cover - error handling
if strict:
logger.error(
@@ -561,7 +289,8 @@ class EncryptionManager:
"""Updates the checksum file for the specified file."""
if relative_path is None:
relative_path = Path("seedpass_entries_db.json.enc")
file_path = self.resolve_relative_path(relative_path)
file_path = self.fingerprint_dir / relative_path
if not file_path.exists():
return
@@ -570,22 +299,7 @@ class EncryptionManager:
fh.seek(0)
encrypted_bytes = fh.read()
checksum = hashlib.sha256(encrypted_bytes).hexdigest()
# Build checksum path by stripping both `.json` and `.enc`
checksum_base = file_path.with_suffix("").with_suffix("")
checksum_file = checksum_base.parent / f"{checksum_base.name}_checksum.txt"
# Remove legacy checksum file if present
legacy_checksum = file_path.parent / f"{file_path.stem}_checksum.txt"
if legacy_checksum != checksum_file and legacy_checksum.exists():
try:
legacy_checksum.unlink()
except Exception:
logger.warning(
f"Could not remove legacy checksum file '{legacy_checksum}'",
exc_info=True,
)
checksum_file = file_path.parent / f"{file_path.stem}_checksum.txt"
with exclusive_lock(checksum_file) as fh:
fh.seek(0)
fh.truncate()
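# Quick illustration of why the checksum path is built with two with_suffix("")
# calls: both extensions must be stripped so the checksum file name drops
# ".json.enc" rather than only ".enc".
from pathlib import Path

_p = Path("seedpass_entries_db.json.enc")
_base = _p.with_suffix("").with_suffix("")               # seedpass_entries_db
_new_name = f"{_base.name}_checksum.txt"                 # seedpass_entries_db_checksum.txt
_legacy_name = f"{_p.stem}_checksum.txt"                 # seedpass_entries_db.json_checksum.txt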
@@ -600,21 +314,25 @@ class EncryptionManager:
)
raise
def validate_seed(self, seed_phrase: str) -> tuple[bool, Optional[str]]:
"""Validate a BIP-39 mnemonic.
Returns a tuple of ``(is_valid, error_message)`` where ``error_message``
is ``None`` when the mnemonic is valid.
"""
# ... validate_seed and derive_seed_from_mnemonic can remain the same ...
def validate_seed(self, seed_phrase: str) -> bool:
try:
if Mnemonic("english").check(seed_phrase):
logger.debug("Seed phrase validated successfully.")
return True, None
logger.error("Seed phrase failed BIP-39 validation.")
return False, "Invalid seed phrase."
words = seed_phrase.split()
if len(words) != 12:
logger.error("Seed phrase does not contain exactly 12 words.")
print(
colored(
"Error: Seed phrase must contain exactly 12 words.",
"red",
)
)
return False
logger.debug("Seed phrase validated successfully.")
return True
except Exception as e:
logger.error(f"Error validating seed phrase: {e}", exc_info=True)
return False, f"Failed to validate seed phrase: {e}"
logging.error(f"Error validating seed phrase: {e}", exc_info=True)
print(colored(f"Error: Failed to validate seed phrase: {e}", "red"))
return False
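# Standalone sketch of checksum-based validation with the python-mnemonic
# library; the phrase is the well-known all-"abandon" BIP-39 test vector.
from mnemonic import Mnemonic

_m = Mnemonic("english")
_phrase = ("abandon " * 11 + "about").strip()
assert _m.check(_phrase)                                  # valid 12-word mnemonic
assert not _m.check(("abandon " * 12).strip())            # fails the checksum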
def derive_seed_from_mnemonic(self, mnemonic: str, passphrase: str = "") -> bytes:
try:

View File

@@ -25,6 +25,7 @@ except Exception: # pragma: no cover - fallback when orjson is missing
USE_ORJSON = False
import logging
import hashlib
import sys
import shutil
import time
from typing import Optional, Tuple, Dict, Any, List
@@ -32,11 +33,10 @@ from pathlib import Path
from termcolor import colored
from .migrations import LATEST_VERSION
from .entry_types import EntryType, ALL_ENTRY_TYPES
from .totp import TotpManager, random_totp_secret
from .entry_types import EntryType
from .totp import TotpManager
from utils.fingerprint import generate_fingerprint
from utils.checksum import canonical_json_dumps
from utils.atomic_write import atomic_write
from utils.key_validation import (
validate_totp_secret,
validate_ssh_key_pair,
@@ -47,7 +47,6 @@ from utils.key_validation import (
from .vault import Vault
from .backup import BackupManager
from .errors import SeedPassError
# Instantiate the logger
@@ -148,7 +147,7 @@ class EntryManager:
except Exception as e:
logger.error(f"Error determining next index: {e}", exc_info=True)
print(colored(f"Error determining next index: {e}", "red"))
raise SeedPassError(f"Error determining next index: {e}") from e
sys.exit(1)
def add_entry(
self,
@@ -222,9 +221,7 @@ class EntryManager:
data["entries"][str(index)] = entry
logger.debug(
f"Added entry at index {index} with label '{entry.get('label', '')}'."
)
logger.debug(f"Added entry at index {index}: {data['entries'][str(index)]}")
self._save_index(data)
self.update_checksum()
@@ -238,7 +235,7 @@ class EntryManager:
except Exception as e:
logger.error(f"Failed to add entry: {e}", exc_info=True)
print(colored(f"Error: Failed to add entry: {e}", "red"))
raise SeedPassError(f"Failed to add entry: {e}") from e
sys.exit(1)
def get_next_totp_index(self) -> int:
"""Return the next available derivation index for TOTP secrets."""
@@ -257,7 +254,7 @@ class EntryManager:
def add_totp(
self,
label: str,
parent_seed: str | bytes | None = None,
parent_seed: str,
*,
archived: bool = False,
secret: str | None = None,
@@ -266,16 +263,13 @@ class EntryManager:
digits: int = 6,
notes: str = "",
tags: list[str] | None = None,
deterministic: bool = False,
) -> str:
"""Add a new TOTP entry and return the provisioning URI."""
entry_id = self.get_next_index()
data = self._load_index()
data.setdefault("entries", {})
if deterministic:
if parent_seed is None:
raise ValueError("Seed required for deterministic TOTP")
if secret is None:
if index is None:
index = self.get_next_totp_index()
secret = TotpManager.derive_secret(parent_seed, index)
@@ -292,11 +286,8 @@ class EntryManager:
"archived": archived,
"notes": notes,
"tags": tags or [],
"deterministic": True,
}
else:
if secret is None:
secret = random_totp_secret()
if not validate_totp_secret(secret):
raise ValueError("Invalid TOTP secret")
entry = {
@@ -310,7 +301,6 @@ class EntryManager:
"archived": archived,
"notes": notes,
"tags": tags or [],
"deterministic": False,
}
data["entries"][str(entry_id)] = entry
@@ -468,7 +458,7 @@ class EntryManager:
seed_bytes = Bip39SeedGenerator(parent_seed).Generate()
bip85 = BIP85(seed_bytes)
entropy = bip85.derive_entropy(index=index, entropy_bytes=32)
entropy = bip85.derive_entropy(index=index, bytes_len=32)
keys = Keys(priv_k=entropy.hex())
npub = Keys.hex_to_bech32(keys.public_key_hex(), "npub")
nsec = Keys.hex_to_bech32(keys.private_key_hex(), "nsec")
@@ -546,7 +536,7 @@ class EntryManager:
bip85 = BIP85(seed_bytes)
key_idx = int(entry.get("index", index))
entropy = bip85.derive_entropy(index=key_idx, entropy_bytes=32)
entropy = bip85.derive_entropy(index=key_idx, bytes_len=32)
keys = Keys(priv_k=entropy.hex())
npub = Keys.hex_to_bech32(keys.public_key_hex(), "npub")
nsec = Keys.hex_to_bech32(keys.private_key_hex(), "nsec")
@@ -696,10 +686,7 @@ class EntryManager:
return derive_seed_phrase(bip85, seed_index, words)
def get_totp_code(
self,
index: int,
parent_seed: str | bytes | None = None,
timestamp: int | None = None,
self, index: int, parent_seed: str | None = None, timestamp: int | None = None
) -> str:
"""Return the current TOTP code for the specified entry."""
entry = self.retrieve_entry(index)
@@ -709,12 +696,12 @@ class EntryManager:
etype != EntryType.TOTP.value and kind != EntryType.TOTP.value
):
raise ValueError("Entry is not a TOTP entry")
if entry.get("deterministic", False) or "secret" not in entry:
if parent_seed is None:
raise ValueError("Seed required for derived TOTP")
totp_index = int(entry.get("index", 0))
return TotpManager.current_code(parent_seed, totp_index, timestamp)
return TotpManager.current_code_from_secret(entry["secret"], timestamp)
if "secret" in entry:
return TotpManager.current_code_from_secret(entry["secret"], timestamp)
if parent_seed is None:
raise ValueError("Seed required for derived TOTP")
totp_index = int(entry.get("index", 0))
return TotpManager.current_code(parent_seed, totp_index, timestamp)
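# Sketch of the imported-secret path with pyotp (the secret here is a freshly
# generated example, not derived from any seed).
import pyotp

_secret = pyotp.random_base32()
_totp = pyotp.TOTP(_secret, interval=30, digits=6)
_code = _totp.now()
assert _totp.verify(_code, valid_window=1)   # tolerate a period boundary crossing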
def get_totp_time_remaining(self, index: int) -> int:
"""Return seconds remaining in the TOTP period for the given entry."""
@@ -729,9 +716,7 @@ class EntryManager:
period = int(entry.get("period", 30))
return TotpManager.time_remaining(period)
def export_totp_entries(
self, parent_seed: str | bytes | None
) -> dict[str, list[dict[str, Any]]]:
def export_totp_entries(self, parent_seed: str) -> dict[str, list[dict[str, Any]]]:
"""Return all TOTP secrets and metadata for external use."""
data = self._load_index()
entries = data.get("entries", {})
@@ -743,13 +728,11 @@ class EntryManager:
label = entry.get("label", "")
period = int(entry.get("period", 30))
digits = int(entry.get("digits", 6))
if entry.get("deterministic", False) or "secret" not in entry:
if parent_seed is None:
raise ValueError("Seed required for deterministic TOTP export")
if "secret" in entry:
secret = entry["secret"]
else:
idx = int(entry.get("index", 0))
secret = TotpManager.derive_secret(parent_seed, idx)
else:
secret = entry["secret"]
uri = TotpManager.make_otpauth_uri(label, secret, period, digits)
exported.append(
{
@@ -796,9 +779,7 @@ class EntryManager:
EntryType.MANAGED_ACCOUNT.value,
):
entry.setdefault("custom_fields", [])
logger.debug(
f"Retrieved entry at index {index} with label '{entry.get('label', '')}'."
)
logger.debug(f"Retrieved entry at index {index}: {entry}")
clean = {k: v for k, v in entry.items() if k != "modified_ts"}
return clean
else:
@@ -1028,11 +1009,13 @@ class EntryManager:
if custom_fields is not None:
entry["custom_fields"] = custom_fields
logger.debug(f"Updated custom fields for index {index}.")
logger.debug(
f"Updated custom fields for index {index}: {custom_fields}"
)
if tags is not None:
entry["tags"] = tags
logger.debug(f"Updated tags for index {index}.")
logger.debug(f"Updated tags for index {index}: {tags}")
policy_updates: dict[str, Any] = {}
if include_special_chars is not None:
@@ -1059,9 +1042,7 @@ class EntryManager:
entry["modified_ts"] = int(time.time())
data["entries"][str(index)] = entry
logger.debug(
f"Modified entry at index {index} with label '{entry.get('label', '')}'."
)
logger.debug(f"Modified entry at index {index}: {entry}")
self._save_index(data)
self.update_checksum()
@@ -1090,7 +1071,7 @@ class EntryManager:
def list_entries(
self,
sort_by: str = "index",
filter_kinds: list[str] | None = None,
filter_kind: str | None = None,
*,
include_archived: bool = False,
verbose: bool = True,
@@ -1102,9 +1083,8 @@ class EntryManager:
sort_by:
Field to sort by. Supported values are ``"index"``, ``"label"`` and
``"updated"``.
filter_kinds:
Optional list of entry kinds to restrict the results. Defaults to
``ALL_ENTRY_TYPES``.
filter_kind:
Optional entry kind to restrict the results.
Archived entries are omitted unless ``include_archived`` is ``True``.
"""
@@ -1133,14 +1113,12 @@ class EntryManager:
sorted_items = sorted(entries_data.items(), key=sort_key)
if filter_kinds is None:
filter_kinds = ALL_ENTRY_TYPES
filtered_items: List[Tuple[int, Dict[str, Any]]] = []
for idx_str, entry in sorted_items:
if (
entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
not in filter_kinds
filter_kind is not None
and entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
!= filter_kind
):
continue
if not include_archived and entry.get(
@@ -1334,7 +1312,8 @@ class EntryManager:
# The checksum file path already includes the fingerprint directory
checksum_path = self.checksum_file
atomic_write(checksum_path, lambda f: f.write(checksum))
with open(checksum_path, "w") as f:
f.write(checksum)
logger.debug(f"Checksum updated and written to '{checksum_path}'.")
print(colored(f"[+] Checksum updated successfully.", "green"))
@@ -1388,7 +1367,7 @@ class EntryManager:
def list_all_entries(
self,
sort_by: str = "index",
filter_kinds: list[str] | None = None,
filter_kind: str | None = None,
*,
include_archived: bool = False,
) -> None:
@@ -1396,7 +1375,7 @@ class EntryManager:
try:
entries = self.list_entries(
sort_by=sort_by,
filter_kinds=filter_kinds,
filter_kind=filter_kind,
include_archived=include_archived,
)
if not entries:
@@ -1420,7 +1399,7 @@ class EntryManager:
def get_entry_summaries(
self,
filter_kinds: list[str] | None = None,
filter_kind: str | None = None,
*,
include_archived: bool = False,
) -> list[tuple[int, str, str]]:
@@ -1429,13 +1408,10 @@ class EntryManager:
data = self._load_index()
entries_data = data.get("entries", {})
if filter_kinds is None:
filter_kinds = ALL_ENTRY_TYPES
summaries: list[tuple[int, str, str]] = []
for idx_str, entry in entries_data.items():
etype = entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
if etype not in filter_kinds:
if filter_kind and etype != filter_kind:
continue
if not include_archived and entry.get(
"archived", entry.get("blacklisted", False)

View File

@@ -1,233 +0,0 @@
from __future__ import annotations
import logging
import time
from typing import TYPE_CHECKING
from termcolor import colored
from constants import (
DEFAULT_PASSWORD_LENGTH,
MAX_PASSWORD_LENGTH,
MIN_PASSWORD_LENGTH,
)
import seedpass.core.manager as manager_module
from utils.terminal_utils import clear_header_with_notification, pause
if TYPE_CHECKING: # pragma: no cover - typing only
from .manager import PasswordManager
class EntryService:
"""Entry management operations for :class:`PasswordManager`."""
def __init__(self, manager: PasswordManager) -> None:
self.manager = manager
def handle_add_password(self) -> None:
pm = self.manager
try:
fp, parent_fp, child_fp = pm.header_fingerprint_args
clear_header_with_notification(
pm,
fp,
"Main Menu > Add Entry > Password",
parent_fingerprint=parent_fp,
child_fingerprint=child_fp,
)
def prompt_length() -> int | None:
length_input = input(
f"Enter desired password length (default {DEFAULT_PASSWORD_LENGTH}): "
).strip()
length = DEFAULT_PASSWORD_LENGTH
if length_input:
if not length_input.isdigit():
print(
colored("Error: Password length must be a number.", "red")
)
return None
length = int(length_input)
if not (MIN_PASSWORD_LENGTH <= length <= MAX_PASSWORD_LENGTH):
print(
colored(
f"Error: Password length must be between {MIN_PASSWORD_LENGTH} and {MAX_PASSWORD_LENGTH}.",
"red",
)
)
return None
return length
def finalize_entry(index: int, label: str, length: int) -> None:
pm.is_dirty = True
pm.last_update = time.time()
entry = pm.entry_manager.retrieve_entry(index)
password = pm._generate_password_for_entry(entry, index, length)
print(
colored(
f"\n[+] Password generated and indexed with ID {index}.\n",
"green",
)
)
if pm.secret_mode_enabled:
if manager_module.copy_to_clipboard(
password, pm.clipboard_clear_delay
):
print(
colored(
f"[+] Password copied to clipboard. Will clear in {pm.clipboard_clear_delay} seconds.",
"green",
)
)
else:
print(colored(f"Password for {label}: {password}\n", "yellow"))
try:
pm.start_background_vault_sync()
logging.info(
"Encrypted index posted to Nostr after entry addition."
)
except Exception as nostr_error: # pragma: no cover - best effort
logging.error(
f"Failed to post updated index to Nostr: {nostr_error}",
exc_info=True,
)
pause()
mode = input("Choose mode: [Q]uick or [A]dvanced? ").strip().lower()
website_name = input("Enter the label or website name: ").strip()
if not website_name:
print(colored("Error: Label cannot be empty.", "red"))
return
username = input("Enter the username (optional): ").strip()
url = input("Enter the URL (optional): ").strip()
if mode.startswith("q"):
length = prompt_length()
if length is None:
return
include_special_input = (
input("Include special characters? (Y/n): ").strip().lower()
)
include_special_chars: bool | None = None
if include_special_input:
include_special_chars = include_special_input != "n"
index = pm.entry_manager.add_entry(
website_name,
length,
username,
url,
include_special_chars=include_special_chars,
)
finalize_entry(index, website_name, length)
return
notes = input("Enter notes (optional): ").strip()
tags_input = input("Enter tags (comma-separated, optional): ").strip()
tags = (
[t.strip() for t in tags_input.split(",") if t.strip()]
if tags_input
else []
)
custom_fields: list[dict[str, object]] = []
while True:
add_field = input("Add custom field? (y/N): ").strip().lower()
if add_field != "y":
break
label = input(" Field label: ").strip()
value = input(" Field value: ").strip()
hidden = input(" Hidden field? (y/N): ").strip().lower() == "y"
custom_fields.append(
{"label": label, "value": value, "is_hidden": hidden}
)
length = prompt_length()
if length is None:
return
include_special_input = (
input("Include special characters? (Y/n): ").strip().lower()
)
include_special_chars: bool | None = None
if include_special_input:
include_special_chars = include_special_input != "n"
allowed_special_chars = input(
"Allowed special characters (leave blank for default): "
).strip()
if not allowed_special_chars:
allowed_special_chars = None
special_mode = input("Special character mode (safe/leave blank): ").strip()
if not special_mode:
special_mode = None
exclude_ambiguous_input = (
input("Exclude ambiguous characters? (y/N): ").strip().lower()
)
exclude_ambiguous: bool | None = None
if exclude_ambiguous_input:
exclude_ambiguous = exclude_ambiguous_input == "y"
min_uppercase_input = input(
"Minimum uppercase letters (blank for default): "
).strip()
if min_uppercase_input and not min_uppercase_input.isdigit():
print(colored("Error: Minimum uppercase must be a number.", "red"))
return
min_uppercase = int(min_uppercase_input) if min_uppercase_input else None
min_lowercase_input = input(
"Minimum lowercase letters (blank for default): "
).strip()
if min_lowercase_input and not min_lowercase_input.isdigit():
print(colored("Error: Minimum lowercase must be a number.", "red"))
return
min_lowercase = int(min_lowercase_input) if min_lowercase_input else None
min_digits_input = input("Minimum digits (blank for default): ").strip()
if min_digits_input and not min_digits_input.isdigit():
print(colored("Error: Minimum digits must be a number.", "red"))
return
min_digits = int(min_digits_input) if min_digits_input else None
min_special_input = input(
"Minimum special characters (blank for default): "
).strip()
if min_special_input and not min_special_input.isdigit():
print(colored("Error: Minimum special must be a number.", "red"))
return
min_special = int(min_special_input) if min_special_input else None
index = pm.entry_manager.add_entry(
website_name,
length,
username,
url,
archived=False,
notes=notes,
custom_fields=custom_fields,
tags=tags,
include_special_chars=include_special_chars,
allowed_special_chars=allowed_special_chars,
special_mode=special_mode,
exclude_ambiguous=exclude_ambiguous,
min_uppercase=min_uppercase,
min_lowercase=min_lowercase,
min_digits=min_digits,
min_special=min_special,
)
finalize_entry(index, website_name, length)
except Exception as e: # pragma: no cover - defensive
logging.error(f"Error during password generation: {e}", exc_info=True)
print(colored(f"Error: Failed to generate password: {e}", "red"))
pause()

View File

@@ -15,7 +15,3 @@ class EntryType(str, Enum):
NOSTR = "nostr"
KEY_VALUE = "key_value"
MANAGED_ACCOUNT = "managed_account"
# List of all entry type values for convenience
ALL_ENTRY_TYPES = [e.value for e in EntryType]

View File

@@ -1,30 +0,0 @@
"""Custom exceptions for SeedPass core modules.
This module defines :class:`SeedPassError`, a base exception used across the
core modules. Library code should raise this error instead of terminating the
process with ``sys.exit`` so that callers can handle failures gracefully.
When raised inside the CLI, :class:`SeedPassError` behaves like a Click
exception, displaying a friendly message and exiting with code ``1``.
"""
from click import ClickException
from cryptography.fernet import InvalidToken
class SeedPassError(ClickException):
"""Base exception for SeedPass-related errors."""
def __init__(self, message: str):
super().__init__(message)
class DecryptionError(InvalidToken, SeedPassError):
"""Raised when encrypted data cannot be decrypted.
Subclasses :class:`cryptography.fernet.InvalidToken` so callers expecting
the cryptography exception continue to work.
"""
__all__ = ["SeedPassError", "DecryptionError"]
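# A small sketch of the behaviour the docstring above describes: raising a
# ClickException subclass inside a Click command prints "Error: <message>" and
# exits with code 1 instead of dumping a traceback. The command name is made up.
import click

class _DemoError(click.ClickException):
    pass

@click.command()
def _sync() -> None:
    raise _DemoError("relay unreachable")

# Invoking `_sync()` from a console script would print
# "Error: relay unreachable" and exit with status 1.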

File diff suppressed because it is too large

View File

@@ -1,188 +0,0 @@
from __future__ import annotations
import logging
import sys
from typing import TYPE_CHECKING
from termcolor import colored
from .entry_types import EntryType, ALL_ENTRY_TYPES
import seedpass.core.manager as manager_module
from utils.color_scheme import color_text
from utils.terminal_utils import clear_header_with_notification
from utils.logging_utils import pause_logging_for_ui
if TYPE_CHECKING: # pragma: no cover - typing only
from .manager import PasswordManager
class MenuHandler:
"""Handle interactive menu operations for :class:`PasswordManager`."""
def __init__(self, manager: PasswordManager) -> None:
self.manager = manager
@pause_logging_for_ui
def handle_list_entries(self) -> None:
"""List entries and optionally show details."""
pm = self.manager
try:
while True:
fp, parent_fp, child_fp = pm.header_fingerprint_args
clear_header_with_notification(
pm,
fp,
"Main Menu > List Entries",
parent_fingerprint=parent_fp,
child_fingerprint=child_fp,
)
print(color_text("\nList Entries:", "menu"))
print(color_text("1. All", "menu"))
option_map: dict[str, str] = {}
for i, etype in enumerate(ALL_ENTRY_TYPES, start=2):
label = etype.replace("_", " ").title()
print(color_text(f"{i}. {label}", "menu"))
option_map[str(i)] = etype
choice = input("Select entry type or press Enter to go back: ").strip()
if choice == "1":
filter_kinds = None
elif choice in option_map:
filter_kinds = [option_map[choice]]
elif not choice:
return
else:
print(colored("Invalid choice.", "red"))
continue
while True:
summaries = pm.entry_manager.get_entry_summaries(
filter_kinds, include_archived=False
)
if not summaries:
break
fp, parent_fp, child_fp = pm.header_fingerprint_args
clear_header_with_notification(
pm,
fp,
"Main Menu > List Entries",
parent_fingerprint=parent_fp,
child_fingerprint=child_fp,
)
print(colored("\n[+] Entries:\n", "green"))
for idx, etype, label in summaries:
if filter_kinds is None:
display_type = etype.capitalize()
print(colored(f"{idx}. {display_type} - {label}", "cyan"))
else:
print(colored(f"{idx}. {label}", "cyan"))
idx_input = input(
"Enter index to view details or press Enter to go back: "
).strip()
if not idx_input:
break
if not idx_input.isdigit():
print(colored("Invalid index.", "red"))
continue
pm.show_entry_details_by_index(int(idx_input))
except Exception as e: # pragma: no cover - defensive
logging.error(f"Failed to list entries: {e}", exc_info=True)
print(colored(f"Error: Failed to list entries: {e}", "red"))
@pause_logging_for_ui
def handle_display_totp_codes(self) -> None:
"""Display all stored TOTP codes with a countdown progress bar."""
pm = self.manager
try:
fp, parent_fp, child_fp = pm.header_fingerprint_args
clear_header_with_notification(
pm,
fp,
"Main Menu > 2FA Codes",
parent_fingerprint=parent_fp,
child_fingerprint=child_fp,
)
data = pm.entry_manager.vault.load_index()
entries = data.get("entries", {})
totp_list: list[tuple[str, int, int, bool]] = []
for idx_str, entry in entries.items():
if pm._entry_type_str(entry) == EntryType.TOTP.value and not entry.get(
"archived", entry.get("blacklisted", False)
):
label = entry.get("label", "")
period = int(entry.get("period", 30))
imported = "secret" in entry
totp_list.append((label, int(idx_str), period, imported))
if not totp_list:
pm.notify("No 2FA entries found.", level="WARNING")
return
totp_list.sort(key=lambda t: t[0].lower())
print(colored("Press Enter to return to the menu.", "cyan"))
while True:
fp, parent_fp, child_fp = pm.header_fingerprint_args
clear_header_with_notification(
pm,
fp,
"Main Menu > 2FA Codes",
parent_fingerprint=parent_fp,
child_fingerprint=child_fp,
)
print(colored("Press Enter to return to the menu.", "cyan"))
generated = [t for t in totp_list if not t[3]]
imported_list = [t for t in totp_list if t[3]]
if generated:
print(colored("\nGenerated 2FA Codes:", "green"))
for label, idx, period, _ in generated:
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(
pm, "parent_seed", None
)
code = pm.entry_manager.get_totp_code(idx, key)
remaining = pm.entry_manager.get_totp_time_remaining(idx)
filled = int(20 * (period - remaining) / period)
bar = "[" + "#" * filled + "-" * (20 - filled) + "]"
if pm.secret_mode_enabled:
if manager_module.copy_to_clipboard(
code, pm.clipboard_clear_delay
):
print(
f"[{idx}] {label}: [HIDDEN] {bar} {remaining:2d}s - copied to clipboard"
)
else:
print(
f"[{idx}] {label}: {color_text(code, 'deterministic')} {bar} {remaining:2d}s"
)
if imported_list:
print(colored("\nImported 2FA Codes:", "green"))
for label, idx, period, _ in imported_list:
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(
pm, "parent_seed", None
)
code = pm.entry_manager.get_totp_code(idx, key)
remaining = pm.entry_manager.get_totp_time_remaining(idx)
filled = int(20 * (period - remaining) / period)
bar = "[" + "#" * filled + "-" * (20 - filled) + "]"
if pm.secret_mode_enabled:
if manager_module.copy_to_clipboard(
code, pm.clipboard_clear_delay
):
print(
f"[{idx}] {label}: [HIDDEN] {bar} {remaining:2d}s - copied to clipboard"
)
else:
print(
f"[{idx}] {label}: {color_text(code, 'imported')} {bar} {remaining:2d}s"
)
sys.stdout.flush()
try:
user_input = manager_module.timed_input("", 1)
if user_input.strip() == "" or user_input.strip().lower() == "b":
break
except TimeoutError:
pass
except KeyboardInterrupt:
print()
break
except Exception as e: # pragma: no cover - defensive
logging.error(f"Error displaying TOTP codes: {e}", exc_info=True)
print(colored(f"Error: Failed to display TOTP codes: {e}", "red"))

View File

@@ -11,18 +11,14 @@ Ensure that all dependencies are installed and properly configured in your envir
Never ever ever use Random Salt. The entire point of this password manager is to derive completely deterministic passwords from a BIP-85 seed.
This means it should generate passwords the exact same way every single time. Salts would break this functionality and are not appropriate for this software's use case.
To keep behaviour stable across Python versions, the shuffling logic uses an
HMAC-SHA256-based Fisher-Yates shuffle instead of ``random.Random``. The HMAC
is keyed with the derived password bytes, providing deterministic yet
cryptographically strong pseudo-randomness without relying on Python's
non-stable random implementation.
"""
import os
import logging
import hashlib
import string
import hmac
import random
import traceback
import base64
from typing import Optional
from dataclasses import dataclass
@@ -113,12 +109,10 @@ class PasswordGenerator:
self.bip85 = bip85
self.policy = policy or PasswordPolicy()
if isinstance(parent_seed, (bytes, bytearray)):
self.seed_bytes = bytes(parent_seed)
else:
self.seed_bytes = self.encryption_manager.derive_seed_from_mnemonic(
self.parent_seed
)
# Derive seed bytes from parent_seed using BIP39 (handled by EncryptionManager)
self.seed_bytes = self.encryption_manager.derive_seed_from_mnemonic(
self.parent_seed
)
logger.debug("PasswordGenerator initialized successfully.")
except Exception as e:
@@ -128,8 +122,8 @@ class PasswordGenerator:
def _derive_password_entropy(self, index: int) -> bytes:
"""Derive deterministic entropy for password generation."""
entropy = self.bip85.derive_entropy(index=index, entropy_bytes=64, app_no=32)
logger.debug("Entropy derived for password generation.")
entropy = self.bip85.derive_entropy(index=index, bytes_len=64, app_no=32)
logger.debug(f"Derived entropy: {entropy.hex()}")
hkdf = HKDF(
algorithm=hashes.SHA256(),
@@ -139,43 +133,26 @@ class PasswordGenerator:
backend=default_backend(),
)
hkdf_derived = hkdf.derive(entropy)
logger.debug("Derived key using HKDF.")
logger.debug(f"Derived key using HKDF: {hkdf_derived.hex()}")
dk = hashlib.pbkdf2_hmac("sha256", entropy, b"", 100000)
logger.debug("Derived key using PBKDF2.")
logger.debug(f"Derived key using PBKDF2: {dk.hex()}")
return dk
def _map_entropy_to_chars(self, dk: bytes, alphabet: str) -> str:
"""Map derived bytes to characters from the provided alphabet."""
password = "".join(alphabet[byte % len(alphabet)] for byte in dk)
logger.debug("Mapped entropy to allowed characters.")
logger.debug(f"Password after mapping to all allowed characters: {password}")
return password
def _fisher_yates_hmac(self, items: list[str], key: bytes) -> list[str]:
"""Shuffle ``items`` in a deterministic yet cryptographically sound manner.
A Fisher-Yates shuffle is driven by an HMAC-SHA256-based
pseudo-random number generator seeded with ``key``. Unlike
:class:`random.Random`, this approach is stable across Python
versions while still deriving all of its entropy from ``key``.
"""
counter = 0
for i in range(len(items) - 1, 0, -1):
msg = counter.to_bytes(4, "big")
digest = hmac.new(key, msg, hashlib.sha256).digest()
j = int.from_bytes(digest, "big") % (i + 1)
items[i], items[j] = items[j], items[i]
counter += 1
return items
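# Standalone copy of the shuffle above to illustrate its determinism: the
# ordering depends only on the key, so repeating the call reproduces it exactly.
import hashlib
import hmac

def _hmac_fisher_yates(items: list[str], key: bytes) -> list[str]:
    counter = 0
    for i in range(len(items) - 1, 0, -1):
        digest = hmac.new(key, counter.to_bytes(4, "big"), hashlib.sha256).digest()
        j = int.from_bytes(digest, "big") % (i + 1)
        items[i], items[j] = items[j], items[i]
        counter += 1
    return items

assert _hmac_fisher_yates(list("abcdef"), b"key") == _hmac_fisher_yates(list("abcdef"), b"key")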
def _shuffle_deterministically(self, password: str, dk: bytes) -> str:
"""Deterministically shuffle characters using an HMAC-based PRNG."""
"""Deterministically shuffle characters using derived bytes."""
shuffle_seed = int.from_bytes(dk, "big")
rng = random.Random(shuffle_seed)
password_chars = list(password)
shuffled_chars = self._fisher_yates_hmac(password_chars, dk)
shuffled = "".join(shuffled_chars)
logger.debug("Shuffled password deterministically using HMAC-Fisher-Yates.")
rng.shuffle(password_chars)
shuffled = "".join(password_chars)
logger.debug("Shuffled password deterministically.")
return shuffled
def generate_password(
@@ -249,7 +226,7 @@ class PasswordGenerator:
extra = self._map_entropy_to_chars(dk, all_allowed)
password += extra
password = self._shuffle_deterministically(password, dk)
logger.debug("Extended password to meet length requirement.")
logger.debug(f"Extended password: {password}")
# Trim the password to the desired length and enforce complexity on
# the final result. Complexity enforcement is repeated here because
@@ -262,7 +239,7 @@ class PasswordGenerator:
)
password = self._shuffle_deterministically(password, dk)
logger.debug(
f"Generated final password of length {length} with complexity enforced."
f"Final password (trimmed to {length} chars with complexity enforced): {password}"
)
return password
@@ -334,28 +311,34 @@ class PasswordGenerator:
index = get_dk_value() % len(password_chars)
char = uppercase[get_dk_value() % len(uppercase)]
password_chars[index] = char
logger.debug(f"Added uppercase letter at position {index}.")
logger.debug(
f"Added uppercase letter '{char}' at position {index}."
)
if current_lower < min_lower:
for _ in range(min_lower - current_lower):
index = get_dk_value() % len(password_chars)
char = lowercase[get_dk_value() % len(lowercase)]
password_chars[index] = char
logger.debug(f"Added lowercase letter at position {index}.")
logger.debug(
f"Added lowercase letter '{char}' at position {index}."
)
if current_digits < min_digits:
for _ in range(min_digits - current_digits):
index = get_dk_value() % len(password_chars)
char = digits[get_dk_value() % len(digits)]
password_chars[index] = char
logger.debug(f"Added digit at position {index}.")
logger.debug(f"Added digit '{char}' at position {index}.")
if special and current_special < min_special:
for _ in range(min_special - current_special):
index = get_dk_value() % len(password_chars)
char = special[get_dk_value() % len(special)]
password_chars[index] = char
logger.debug(f"Added special character at position {index}.")
logger.debug(
f"Added special character '{char}' at position {index}."
)
# Additional deterministic inclusion of symbols to increase score
if special:
@@ -369,7 +352,9 @@ class PasswordGenerator:
index = get_dk_value() % len(password_chars)
char = special[get_dk_value() % len(special)]
password_chars[index] = char
logger.debug(f"Added additional symbol at position {index}.")
logger.debug(
f"Added additional symbol '{char}' at position {index}."
)
# Ensure balanced distribution by assigning different character types to specific segments
# Example: Divide password into segments and assign different types
@@ -387,15 +372,19 @@ class PasswordGenerator:
if i == 0 and password_chars[j] not in uppercase:
char = uppercase[get_dk_value() % len(uppercase)]
password_chars[j] = char
logger.debug(f"Assigned uppercase letter to position {j}.")
logger.debug(
f"Assigned uppercase letter '{char}' to position {j}."
)
elif i == 1 and password_chars[j] not in lowercase:
char = lowercase[get_dk_value() % len(lowercase)]
password_chars[j] = char
logger.debug(f"Assigned lowercase letter to position {j}.")
logger.debug(
f"Assigned lowercase letter '{char}' to position {j}."
)
elif i == 2 and password_chars[j] not in digits:
char = digits[get_dk_value() % len(digits)]
password_chars[j] = char
logger.debug(f"Assigned digit to position {j}.")
logger.debug(f"Assigned digit '{char}' to position {j}.")
elif (
special
and i == len(char_types) - 1
@@ -403,18 +392,17 @@ class PasswordGenerator:
):
char = special[get_dk_value() % len(special)]
password_chars[j] = char
logger.debug(f"Assigned special character to position {j}.")
logger.debug(
f"Assigned special character '{char}' to position {j}."
)
# Shuffle again to distribute the characters more evenly. The key is
# tweaked with the current ``dk_index`` so that each call produces a
# unique but deterministic ordering.
shuffle_key = hmac.new(
dk, dk_index.to_bytes(4, "big"), hashlib.sha256
).digest()
password_chars = self._fisher_yates_hmac(password_chars, shuffle_key)
logger.debug(
"Shuffled password characters for balanced distribution using HMAC-Fisher-Yates."
)
# Shuffle again to distribute the characters more evenly
shuffle_seed = (
int.from_bytes(dk, "big") + dk_index
) # Modify seed to vary shuffle
rng = random.Random(shuffle_seed)
rng.shuffle(password_chars)
logger.debug(f"Shuffled password characters for balanced distribution.")
# Final counts after modifications
final_upper = sum(1 for c in password_chars if c in uppercase)
@@ -435,7 +423,7 @@ class PasswordGenerator:
def derive_ssh_key(bip85: BIP85, idx: int) -> bytes:
"""Derive 32 bytes of entropy suitable for an SSH key."""
return bip85.derive_entropy(index=idx, entropy_bytes=32, app_no=32)
return bip85.derive_entropy(index=idx, bytes_len=32, app_no=32)
def derive_ssh_key_pair(parent_seed: str, index: int) -> tuple[str, str]:
@@ -469,13 +457,7 @@ def derive_seed_phrase(bip85: BIP85, idx: int, words: int = 24) -> str:
def derive_pgp_key(
bip85: BIP85, idx: int, key_type: str = "ed25519", user_id: str = ""
) -> tuple[str, str]:
"""Derive a deterministic PGP private key and return it with its fingerprint.
For RSA keys the randomness required during key generation is provided by
an HMAC-SHA256 based deterministic generator seeded from the BIP-85
entropy. This avoids use of Python's ``random`` module while ensuring the
output remains stable across Python versions.
"""
"""Derive a deterministic PGP private key and return it with its fingerprint."""
from pgpy import PGPKey, PGPUID
from pgpy.packet.packets import PrivKeyV4
@@ -501,24 +483,20 @@ def derive_pgp_key(
import hashlib
import datetime
entropy = bip85.derive_entropy(index=idx, entropy_bytes=32, app_no=32)
entropy = bip85.derive_entropy(index=idx, bytes_len=32, app_no=32)
created = datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc)
if key_type.lower() == "rsa":
class DRNG:
"""HMAC-SHA256 based deterministic random generator."""
def __init__(self, seed: bytes) -> None:
self.key = seed
self.counter = 0
self.seed = seed
def __call__(self, n: int) -> bytes: # pragma: no cover - deterministic
out = b""
while len(out) < n:
msg = self.counter.to_bytes(4, "big")
out += hmac.new(self.key, msg, hashlib.sha256).digest()
self.counter += 1
self.seed = hashlib.sha256(self.seed).digest()
out += self.seed
return out[:n]
rsa_key = RSA.generate(2048, randfunc=DRNG(entropy))

View File

@@ -21,7 +21,6 @@ from utils.key_derivation import (
)
from .encryption import EncryptionManager
from utils.checksum import json_checksum, canonical_json_dumps
from .state_manager import StateManager
logger = logging.getLogger(__name__)
@@ -33,7 +32,6 @@ class PortableMode(Enum):
"""Encryption mode for portable exports."""
SEED_ONLY = EncryptionMode.SEED_ONLY.value
NONE = "none"
def _derive_export_key(seed: str) -> bytes:
@@ -49,15 +47,8 @@ def export_backup(
*,
publish: bool = False,
parent_seed: str | None = None,
encrypt: bool = True,
) -> Path:
"""Export the current vault state to a portable file.
When ``encrypt`` is ``True`` (the default) the payload is encrypted with a
key derived from the parent seed. When ``encrypt`` is ``False`` the payload
is written in plaintext and the wrapper records an ``encryption_mode`` of
:data:`PortableMode.NONE`.
"""
"""Export the current vault state to a portable encrypted file."""
if dest_path is None:
ts = int(time.time())
@@ -66,32 +57,24 @@ def export_backup(
dest_path = dest_dir / EXPORT_NAME_TEMPLATE.format(ts=ts)
index_data = vault.load_index()
seed = (
parent_seed
if parent_seed is not None
else vault.encryption_manager.decrypt_parent_seed()
)
key = _derive_export_key(seed)
enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
canonical = canonical_json_dumps(index_data)
if encrypt:
seed = (
parent_seed
if parent_seed is not None
else vault.encryption_manager.decrypt_parent_seed()
)
key = _derive_export_key(seed)
enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
payload_bytes = enc_mgr.encrypt_data(canonical.encode("utf-8"))
mode = PortableMode.SEED_ONLY
cipher = "aes-gcm"
else:
payload_bytes = canonical.encode("utf-8")
mode = PortableMode.NONE
cipher = "none"
payload_bytes = enc_mgr.encrypt_data(canonical.encode("utf-8"))
checksum = json_checksum(index_data)
wrapper = {
"format_version": FORMAT_VERSION,
"created_at": int(time.time()),
"fingerprint": vault.fingerprint_dir.name,
"encryption_mode": mode.value,
"cipher": cipher,
"encryption_mode": PortableMode.SEED_ONLY.value,
"cipher": "aes-gcm",
"checksum": checksum,
"payload": base64.b64encode(payload_bytes).decode("utf-8"),
}
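# Hedged sketch of the wrapper produced above, with hashlib/json standing in
# for the project's canonical_json_dumps/json_checksum helpers; the version and
# mode strings are placeholders, not the project's actual constants.
import base64
import hashlib
import json
import time

def _make_wrapper(index_data: dict, payload_bytes: bytes, fingerprint: str) -> dict:
    canonical = json.dumps(index_data, sort_keys=True, separators=(",", ":"))
    return {
        "format_version": 1,                                  # placeholder
        "created_at": int(time.time()),
        "fingerprint": fingerprint,
        "encryption_mode": "seed-only",                       # placeholder string
        "cipher": "aes-gcm",
        "checksum": hashlib.sha256(canonical.encode()).hexdigest(),
        "payload": base64.b64encode(payload_bytes).decode("utf-8"),
    }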
@@ -107,12 +90,10 @@ def export_backup(
enc_file.write_bytes(encrypted)
os.chmod(enc_file, 0o600)
try:
idx = StateManager(vault.fingerprint_dir).state.get("nostr_account_idx", 0)
client = NostrClient(
vault.encryption_manager,
vault.fingerprint_dir.name,
config_manager=backup_manager.config_manager,
account_index=idx,
)
asyncio.run(client.publish_snapshot(encrypted))
except Exception:
@@ -131,30 +112,24 @@ def import_backup(
raw = Path(path).read_bytes()
if path.suffix.endswith(".enc"):
raw = vault.encryption_manager.decrypt_data(raw, context=str(path))
raw = vault.encryption_manager.decrypt_data(raw)
wrapper = json.loads(raw.decode("utf-8"))
if wrapper.get("format_version") != FORMAT_VERSION:
raise ValueError("Unsupported backup format")
mode = wrapper.get("encryption_mode")
if wrapper.get("encryption_mode") != PortableMode.SEED_ONLY.value:
raise ValueError("Unsupported encryption mode")
payload = base64.b64decode(wrapper["payload"])
if mode == PortableMode.SEED_ONLY.value:
seed = (
parent_seed
if parent_seed is not None
else vault.encryption_manager.decrypt_parent_seed()
)
key = _derive_export_key(seed)
enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
enc_mgr._legacy_migrate_flag = False
index_bytes = enc_mgr.decrypt_data(payload, context="backup payload")
elif mode == PortableMode.NONE.value:
index_bytes = payload
else:
raise ValueError("Unsupported encryption mode")
seed = (
parent_seed
if parent_seed is not None
else vault.encryption_manager.decrypt_parent_seed()
)
key = _derive_export_key(seed)
enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
index_bytes = enc_mgr.decrypt_data(payload)
index = json.loads(index_bytes.decode("utf-8"))
checksum = json_checksum(index)

View File

@@ -1,109 +0,0 @@
from __future__ import annotations
import logging
from typing import Optional, TYPE_CHECKING
from termcolor import colored
import seedpass.core.manager as manager_module
from utils.password_prompt import prompt_existing_password
if TYPE_CHECKING: # pragma: no cover - typing only
from .manager import PasswordManager
from nostr.client import NostrClient
class ProfileService:
"""Profile-related operations for :class:`PasswordManager`."""
def __init__(self, manager: PasswordManager) -> None:
self.manager = manager
def handle_switch_fingerprint(self, *, password: Optional[str] = None) -> bool:
"""Handle switching to a different seed profile."""
pm = self.manager
try:
print(colored("\nAvailable Seed Profiles:", "cyan"))
fingerprints = pm.fingerprint_manager.list_fingerprints()
for idx, fp in enumerate(fingerprints, start=1):
display = (
pm.fingerprint_manager.display_name(fp)
if hasattr(pm.fingerprint_manager, "display_name")
else fp
)
print(colored(f"{idx}. {display}", "cyan"))
choice = input("Select a seed profile by number to switch: ").strip()
if not choice.isdigit() or not (1 <= int(choice) <= len(fingerprints)):
print(colored("Invalid selection. Returning to main menu.", "red"))
return False
selected_fingerprint = fingerprints[int(choice) - 1]
pm.fingerprint_manager.current_fingerprint = selected_fingerprint
pm.current_fingerprint = selected_fingerprint
if not getattr(pm, "manifest_id", None):
pm.manifest_id = None
pm.fingerprint_dir = pm.fingerprint_manager.get_current_fingerprint_dir()
if not pm.fingerprint_dir:
print(
colored(
f"Error: Seed profile directory for {selected_fingerprint} not found.",
"red",
)
)
return False
if password is None:
password = prompt_existing_password(
"Enter the master password for the selected seed profile: "
)
if not pm.setup_encryption_manager(
pm.fingerprint_dir, password, exit_on_fail=False
):
return False
pm.initialize_bip85()
pm.initialize_managers()
pm.start_background_sync()
print(colored(f"Switched to seed profile {selected_fingerprint}.", "green"))
try:
pm.nostr_client = manager_module.NostrClient(
encryption_manager=pm.encryption_manager,
fingerprint=pm.current_fingerprint,
config_manager=getattr(pm, "config_manager", None),
parent_seed=getattr(pm, "parent_seed", None),
key_index=pm.KEY_INDEX,
account_index=pm.nostr_account_idx,
)
if getattr(pm, "manifest_id", None) and hasattr(
pm.nostr_client, "_state_lock"
):
from nostr.backup_models import Manifest
with pm.nostr_client._state_lock:
pm.nostr_client.current_manifest_id = pm.manifest_id
pm.nostr_client.current_manifest = Manifest(
ver=1,
algo="gzip",
chunks=[],
delta_since=pm.delta_since or None,
)
logging.info(
f"NostrClient re-initialized with seed profile {pm.current_fingerprint}."
)
except Exception as e:
logging.error(f"Failed to re-initialize NostrClient: {e}")
print(
colored(f"Error: Failed to re-initialize NostrClient: {e}", "red")
)
return False
return True
except Exception as e: # pragma: no cover - defensive
logging.error(f"Error during seed profile switching: {e}", exc_info=True)
print(colored(f"Error: Failed to switch seed profiles: {e}", "red"))
return False

View File

@@ -26,7 +26,6 @@ class StateManager:
"manifest_id": None,
"delta_since": 0,
"relays": list(DEFAULT_RELAYS),
"nostr_account_idx": 0,
}
with shared_lock(self.state_path) as fh:
fh.seek(0)
@@ -38,7 +37,6 @@ class StateManager:
"manifest_id": None,
"delta_since": 0,
"relays": list(DEFAULT_RELAYS),
"nostr_account_idx": 0,
}
try:
obj = json.loads(data.decode())
@@ -49,7 +47,6 @@ class StateManager:
obj.setdefault("manifest_id", None)
obj.setdefault("delta_since", 0)
obj.setdefault("relays", list(DEFAULT_RELAYS))
obj.setdefault("nostr_account_idx", 0)
return obj
def _save(self, data: dict) -> None:

View File

@@ -2,11 +2,8 @@
from __future__ import annotations
import os
import sys
import time
import base64
from typing import Union
from urllib.parse import quote
from urllib.parse import urlparse, parse_qs, unquote
@@ -17,24 +14,17 @@ import pyotp
from utils import key_derivation
def random_totp_secret(length: int = 20) -> str:
"""Return a random Base32 encoded TOTP secret."""
return base64.b32encode(os.urandom(length)).decode("ascii").rstrip("=")
class TotpManager:
"""Helper methods for TOTP secrets and codes."""
@staticmethod
def derive_secret(seed: Union[str, bytes], index: int) -> str:
"""Derive a TOTP secret from a seed or raw key and index."""
def derive_secret(seed: str, index: int) -> str:
"""Derive a TOTP secret from a BIP39 seed and index."""
return key_derivation.derive_totp_secret(seed, index)
@classmethod
def current_code(
cls, seed: Union[str, bytes], index: int, timestamp: int | None = None
) -> str:
"""Return the TOTP code for the given seed/key and index."""
def current_code(cls, seed: str, index: int, timestamp: int | None = None) -> str:
"""Return the TOTP code for the given seed and index."""
secret = cls.derive_secret(seed, index)
totp = pyotp.TOTP(secret)
if timestamp is None:

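Illustrative aside: the pyotp calls that TotpManager wraps, shown here with a random Base32 secret as a stand-in for the seed-derived value returned by key_derivation.derive_totp_secret.

import pyotp

secret = pyotp.random_base32()   # stand-in for a seed-derived TOTP secret
totp = pyotp.TOTP(secret)
code = totp.now()                # code for the current 30-second window
assert totp.verify(code)         # True until the window rolls over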
View File

@@ -3,19 +3,8 @@
from pathlib import Path
from typing import Optional, Union
from os import PathLike
import shutil
from termcolor import colored
from cryptography.fernet import InvalidToken
from .encryption import (
EncryptionManager,
LegacyFormatRequiresMigrationError,
USE_ORJSON,
json_lib,
)
from utils.key_derivation import KdfConfig, CURRENT_KDF_VERSION
from utils.password_prompt import prompt_existing_password
from .encryption import EncryptionManager
class Vault:
@@ -33,164 +22,26 @@ class Vault:
self.fingerprint_dir = Path(fingerprint_dir)
self.index_file = self.fingerprint_dir / self.INDEX_FILENAME
self.config_file = self.fingerprint_dir / self.CONFIG_FILENAME
self.migrated_from_legacy = False
def set_encryption_manager(self, manager: EncryptionManager) -> None:
"""Replace the internal encryption manager."""
self.encryption_manager = manager
def _hkdf_kdf(self) -> KdfConfig:
return KdfConfig(
name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""
)
# ----- Password index helpers -----
def load_index(self, *, return_migration_flags: bool = False):
"""Return decrypted password index data, applying migrations.
If a legacy ``seedpass_passwords_db.json.enc`` file is detected, the
user is prompted to migrate it. A backup copy of the legacy file (and
its checksum) is saved under ``legacy_backups`` within the fingerprint
directory before the file is renamed to the new filename.
When ``return_migration_flags`` is ``True`` the tuple
``(data, migrated, last_migration_performed)`` is returned where
``migrated`` indicates whether any migration occurred and
``last_migration_performed`` reflects whether the underlying
:class:`EncryptionManager` reported a conversion.
"""
def load_index(self) -> dict:
"""Return decrypted password index data as a dict, applying migrations."""
legacy_file = self.fingerprint_dir / "seedpass_passwords_db.json.enc"
self.migrated_from_legacy = False
legacy_detected = False
backup_dir = None
if legacy_file.exists() and not self.index_file.exists():
print(colored("Legacy index detected.", "yellow"))
resp = (
input("Would you like to migrate this to the new index format? [y/N]: ")
.strip()
.lower()
)
if resp != "y":
raise RuntimeError("Migration declined by user")
legacy_checksum = (
self.fingerprint_dir / "seedpass_passwords_db_checksum.txt"
)
backup_dir = self.fingerprint_dir / "legacy_backups"
backup_dir.mkdir(exist_ok=True)
shutil.copy2(legacy_file, backup_dir / legacy_file.name)
if legacy_checksum.exists():
shutil.copy2(legacy_checksum, backup_dir / legacy_checksum.name)
legacy_file.rename(self.index_file)
if legacy_checksum.exists():
legacy_checksum.rename(
self.fingerprint_dir / "seedpass_entries_db_checksum.txt"
)
# Remove any leftover legacy files to avoid triggering migration again
for stray in self.fingerprint_dir.glob("seedpass_passwords_db*.enc"):
try:
stray.unlink()
except FileNotFoundError:
pass
stray_checksum = self.fingerprint_dir / "seedpass_passwords_db_checksum.txt"
if stray_checksum.exists():
stray_checksum.unlink()
legacy_detected = True
print(
colored(
"Migration complete. Original index backed up to 'legacy_backups'",
"green",
)
)
try:
data, kdf = self.encryption_manager.load_json_data(
self.index_file, return_kdf=True
)
migration_performed = getattr(
self.encryption_manager, "last_migration_performed", False
)
if kdf.version < CURRENT_KDF_VERSION:
new_kdf = KdfConfig(
name=kdf.name,
version=CURRENT_KDF_VERSION,
params=kdf.params,
salt_b64=kdf.salt_b64,
)
self.encryption_manager.save_json_data(
data, self.index_file, kdf=new_kdf
)
self.encryption_manager.update_checksum(self.index_file)
migration_performed = True
except LegacyFormatRequiresMigrationError:
print(
colored(
"Failed to decrypt index with current key. This may be a legacy index.",
"red",
)
)
resp = input(
"\nChoose an option:\n"
"1. Open legacy index without migrating\n"
"2. Migrate to new format.\n"
"Selection [1/2]: "
).strip()
if resp == "1":
self.encryption_manager._legacy_migrate_flag = False
self.encryption_manager.last_migration_performed = False
elif resp == "2":
self.encryption_manager._legacy_migrate_flag = True
self.encryption_manager.last_migration_performed = True
else:
raise InvalidToken(
"User declined legacy decryption or provided invalid choice."
)
password = prompt_existing_password(
"Enter your master password for legacy decryption: "
)
with self.index_file.open("rb") as fh:
encrypted_data = fh.read()
decrypted = self.encryption_manager.decrypt_legacy(
encrypted_data, password, context=str(self.index_file)
)
if USE_ORJSON:
data = json_lib.loads(decrypted)
else:
data = json_lib.loads(decrypted.decode("utf-8"))
if self.encryption_manager._legacy_migrate_flag:
self.encryption_manager.save_json_data(
data, self.index_file, kdf=self._hkdf_kdf()
)
self.encryption_manager.update_checksum(self.index_file)
migration_performed = getattr(
self.encryption_manager, "last_migration_performed", False
)
except Exception as exc: # noqa: BLE001 - surface clear error and restore
if legacy_detected and backup_dir is not None:
backup_file = backup_dir / legacy_file.name
legacy_checksum_path = (
self.fingerprint_dir / "seedpass_passwords_db_checksum.txt"
)
backup_checksum = backup_dir / legacy_checksum_path.name
try:
if self.index_file.exists():
self.index_file.unlink()
shutil.copy2(backup_file, legacy_file)
checksum_new = (
self.fingerprint_dir / "seedpass_entries_db_checksum.txt"
)
if checksum_new.exists():
checksum_new.unlink()
if backup_checksum.exists():
shutil.copy2(backup_checksum, legacy_checksum_path)
finally:
self.migrated_from_legacy = False
raise RuntimeError(f"Migration failed: {exc}") from exc
data = self.encryption_manager.load_json_data(self.index_file)
from .migrations import apply_migrations, LATEST_VERSION
version = data.get("schema_version", 0)
@@ -198,78 +49,24 @@ class Vault:
raise ValueError(
f"File schema version {version} is newer than supported {LATEST_VERSION}"
)
schema_migrated = version < LATEST_VERSION
try:
data = apply_migrations(data)
if schema_migrated:
self.encryption_manager.save_json_data(
data, self.index_file, kdf=self._hkdf_kdf()
)
self.encryption_manager.update_checksum(self.index_file)
except Exception as exc: # noqa: BLE001 - surface clear error and restore
if legacy_detected and backup_dir is not None:
backup_file = backup_dir / legacy_file.name
legacy_checksum_path = (
self.fingerprint_dir / "seedpass_passwords_db_checksum.txt"
)
backup_checksum = backup_dir / legacy_checksum_path.name
try:
if self.index_file.exists():
self.index_file.unlink()
shutil.copy2(backup_file, legacy_file)
checksum_new = (
self.fingerprint_dir / "seedpass_entries_db_checksum.txt"
)
if checksum_new.exists():
checksum_new.unlink()
if backup_checksum.exists():
shutil.copy2(backup_checksum, legacy_checksum_path)
finally:
self.migrated_from_legacy = False
raise RuntimeError(f"Migration failed: {exc}") from exc
self.migrated_from_legacy = (
legacy_detected or migration_performed or schema_migrated
)
if return_migration_flags:
return data, self.migrated_from_legacy, migration_performed
data = apply_migrations(data)
return data
def save_index(self, data: dict) -> None:
"""Encrypt and write password index."""
self.encryption_manager.save_json_data(
data, self.index_file, kdf=self._hkdf_kdf()
)
self.encryption_manager.save_json_data(data, self.index_file)
def get_encrypted_index(self) -> Optional[bytes]:
"""Return the encrypted index bytes if present."""
return self.encryption_manager.get_encrypted_index()
def decrypt_and_save_index_from_nostr(
self,
encrypted_data: bytes,
*,
strict: bool = True,
merge: bool = False,
return_migration_flag: bool = False,
):
"""Decrypt Nostr payload and update the local index.
Returns ``True``/``False`` for success by default. When
``return_migration_flag`` is ``True``, a tuple ``(success, migrated)`` is
returned, where ``migrated`` indicates whether any legacy migration
occurred.
"""
result = self.encryption_manager.decrypt_and_save_index_from_nostr(
self, encrypted_data: bytes, *, strict: bool = True, merge: bool = False
) -> bool:
"""Decrypt Nostr payload and update the local index."""
return self.encryption_manager.decrypt_and_save_index_from_nostr(
encrypted_data, strict=strict, merge=merge
)
self.migrated_from_legacy = result and getattr(
self.encryption_manager, "last_migration_performed", False
)
if return_migration_flag:
return result, self.migrated_from_legacy
return result
# ----- Config helpers -----
def load_config(self) -> dict:
@@ -278,6 +75,4 @@ class Vault:
def save_config(self, config: dict) -> None:
"""Encrypt and persist configuration."""
self.encryption_manager.save_json_data(
config, self.config_file, kdf=self._hkdf_kdf()
)
self.encryption_manager.save_json_data(config, self.config_file)

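Illustrative aside (assumed usage, not repository code): how a caller might consume the migration flags that the long-form load_index above can return.

# 'vault' is an initialized Vault instance; the names follow the diff above.
data, migrated, converted = vault.load_index(return_migration_flags=True)
if migrated:
    # A legacy file, legacy encryption format, or outdated schema was upgraded;
    # if a legacy index file was migrated, its backup lives under 'legacy_backups'.
    print("Index migrated to the current format.")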
View File

@@ -1,13 +0,0 @@
"""Compatibility layer for historic exception types."""
from .core.errors import SeedPassError
class VaultLockedError(SeedPassError):
"""Raised when an operation requires an unlocked vault."""
def __init__(self, message: str = "Vault is locked") -> None:
super().__init__(message)
__all__ = ["VaultLockedError", "SeedPassError"]

View File

@@ -1,4 +1,4 @@
from . import main
from .app import main
if __name__ == "__main__":
main()

View File

@@ -393,7 +393,7 @@ class TotpViewerWindow(toga.Window):
def refresh_codes(self) -> None:
self.table.data = []
for idx, label, *_rest in self.entries.list_entries(
filter_kinds=[EntryType.TOTP.value]
filter_kind=EntryType.TOTP.value
):
entry = self.entries.retrieve_entry(idx)
code = self.entries.get_totp_code(idx)

View File

@@ -1,25 +1,6 @@
import importlib.util
import logging
import sys
from pathlib import Path
import pytest
sys.path.append(str(Path(__file__).resolve().parents[1]))
from helpers import create_vault, TEST_PASSWORD, TEST_SEED
from seedpass.core.backup import BackupManager
from seedpass.core.config_manager import ConfigManager
from seedpass.core.entry_management import EntryManager
from seedpass.core.manager import EncryptionMode, PasswordManager
@pytest.fixture(
params=["asyncio"] + (["trio"] if importlib.util.find_spec("trio") else [])
)
def anyio_backend(request):
return request.param
@pytest.fixture(autouse=True)
def mute_logging():
@@ -68,29 +49,3 @@ def pytest_collection_modifyitems(
for item in items:
if "desktop" in item.keywords:
item.add_marker(skip_desktop)
@pytest.fixture
def vault(tmp_path):
vault, _ = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
return vault
@pytest.fixture
def password_manager(vault, tmp_path):
cfg_mgr = ConfigManager(vault, tmp_path)
backup_mgr = BackupManager(tmp_path, cfg_mgr)
entry_mgr = EntryManager(vault, backup_mgr)
pm = PasswordManager.__new__(PasswordManager)
pm.encryption_mode = EncryptionMode.SEED_ONLY
pm.encryption_manager = vault.encryption_manager
pm.vault = vault
pm.entry_manager = entry_mgr
pm.backup_manager = backup_mgr
pm.parent_seed = TEST_SEED
pm.nostr_client = None
pm.fingerprint_dir = tmp_path
pm.is_dirty = False
pm.secret_mode_enabled = False
return pm

View File

@@ -11,7 +11,6 @@ from utils.key_derivation import (
derive_index_key,
derive_key_from_password,
)
from utils.fingerprint import generate_fingerprint
TEST_SEED = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
TEST_PASSWORD = "pw"
@@ -23,8 +22,7 @@ def create_vault(
password: str = TEST_PASSWORD,
) -> tuple[Vault, EncryptionManager]:
"""Create a Vault initialized for tests."""
fp = generate_fingerprint(seed)
seed_key = derive_key_from_password(password, fp)
seed_key = derive_key_from_password(password)
seed_mgr = EncryptionManager(seed_key, dir_path)
seed_mgr.encrypt_parent_seed(seed)

View File

@@ -1,35 +0,0 @@
import builtins
from types import SimpleNamespace
import seedpass.core.manager as manager_module
from helpers import TEST_SEED
def test_add_new_fingerprint_word_entry_exits(monkeypatch):
pm = manager_module.PasswordManager.__new__(manager_module.PasswordManager)
pm.fingerprint_manager = SimpleNamespace(current_fingerprint=None)
pm.initialize_managers = lambda: None
calls = {"count": 0}
original_setup = manager_module.PasswordManager.setup_existing_seed
def setup_wrapper(self, *a, **k):
calls["count"] += 1
return original_setup(self, *a, **k)
monkeypatch.setattr(
manager_module.PasswordManager, "setup_existing_seed", setup_wrapper
)
monkeypatch.setattr(manager_module, "prompt_seed_words", lambda *a, **k: TEST_SEED)
monkeypatch.setattr(
manager_module.PasswordManager,
"_finalize_existing_seed",
lambda self, seed, password=None: "fp",
)
monkeypatch.setattr(builtins, "input", lambda *_a, **_k: "2")
result = pm.add_new_fingerprint()
assert result == "fp"
assert calls["count"] == 1
assert pm.fingerprint_manager.current_fingerprint == "fp"

View File

@@ -3,8 +3,7 @@ from pathlib import Path
import sys
import pytest
from httpx import ASGITransport, AsyncClient
import bcrypt
from fastapi.testclient import TestClient
sys.path.append(str(Path(__file__).resolve().parents[1]))
@@ -13,7 +12,7 @@ from seedpass.core.entry_types import EntryType
@pytest.fixture
async def client(monkeypatch):
def client(monkeypatch):
dummy = SimpleNamespace(
entry_manager=SimpleNamespace(
search_entries=lambda q: [
@@ -40,36 +39,25 @@ async def client(monkeypatch):
nostr_client=SimpleNamespace(
key_manager=SimpleNamespace(get_npub=lambda: "np")
),
verify_password=lambda pw: True,
)
monkeypatch.setattr(api, "PasswordManager", lambda: dummy)
monkeypatch.setenv("SEEDPASS_CORS_ORIGINS", "http://example.com")
token = api.start_server()
transport = ASGITransport(app=api.app)
async with AsyncClient(transport=transport, base_url="http://test") as ac:
yield ac, token
client = TestClient(api.app)
return client, token
@pytest.mark.anyio
async def test_token_hashed(client):
_, token = client
assert api.app.state.token_hash != token
assert bcrypt.checkpw(token.encode(), api.app.state.token_hash)
@pytest.mark.anyio
async def test_cors_and_auth(client):
def test_cors_and_auth(client):
cl, token = client
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
res = await cl.get("/api/v1/entry", params={"query": "s"}, headers=headers)
res = cl.get("/api/v1/entry", params={"query": "s"}, headers=headers)
assert res.status_code == 200
assert res.headers.get("access-control-allow-origin") == "http://example.com"
@pytest.mark.anyio
async def test_invalid_token(client):
def test_invalid_token(client):
cl, _token = client
res = await cl.get(
res = cl.get(
"/api/v1/entry",
params={"query": "s"},
headers={"Authorization": "Bearer bad"},
@@ -77,65 +65,59 @@ async def test_invalid_token(client):
assert res.status_code == 401
@pytest.mark.anyio
async def test_get_entry_by_id(client):
def test_get_entry_by_id(client):
cl, token = client
headers = {
"Authorization": f"Bearer {token}",
"Origin": "http://example.com",
"X-SeedPass-Password": "pw",
}
res = await cl.get("/api/v1/entry/1", headers=headers)
res = cl.get("/api/v1/entry/1", headers=headers)
assert res.status_code == 200
assert res.json() == {"label": "Site"}
assert res.headers.get("access-control-allow-origin") == "http://example.com"
@pytest.mark.anyio
async def test_get_config_value(client):
def test_get_config_value(client):
cl, token = client
headers = {
"Authorization": f"Bearer {token}",
"Origin": "http://example.com",
}
res = await cl.get("/api/v1/config/k", headers=headers)
res = cl.get("/api/v1/config/k", headers=headers)
assert res.status_code == 200
assert res.json() == {"key": "k", "value": "v"}
assert res.headers.get("access-control-allow-origin") == "http://example.com"
@pytest.mark.anyio
async def test_list_fingerprint(client):
def test_list_fingerprint(client):
cl, token = client
headers = {
"Authorization": f"Bearer {token}",
"Origin": "http://example.com",
}
res = await cl.get("/api/v1/fingerprint", headers=headers)
res = cl.get("/api/v1/fingerprint", headers=headers)
assert res.status_code == 200
assert res.json() == ["fp"]
assert res.headers.get("access-control-allow-origin") == "http://example.com"
@pytest.mark.anyio
async def test_get_nostr_pubkey(client):
def test_get_nostr_pubkey(client):
cl, token = client
headers = {
"Authorization": f"Bearer {token}",
"Origin": "http://example.com",
}
res = await cl.get("/api/v1/nostr/pubkey", headers=headers)
res = cl.get("/api/v1/nostr/pubkey", headers=headers)
assert res.status_code == 200
assert res.json() == {"npub": "np"}
assert res.headers.get("access-control-allow-origin") == "http://example.com"
@pytest.mark.anyio
async def test_create_modify_archive_entry(client):
def test_create_modify_archive_entry(client):
cl, token = client
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
res = await cl.post(
res = cl.post(
"/api/v1/entry",
json={"label": "test", "length": 12},
headers=headers,
@@ -143,7 +125,7 @@ async def test_create_modify_archive_entry(client):
assert res.status_code == 200
assert res.json() == {"id": 1}
res = await cl.put(
res = cl.put(
"/api/v1/entry/1",
json={"username": "bob"},
headers=headers,
@@ -151,26 +133,25 @@ async def test_create_modify_archive_entry(client):
assert res.status_code == 200
assert res.json() == {"status": "ok"}
res = await cl.post("/api/v1/entry/1/archive", headers=headers)
res = cl.post("/api/v1/entry/1/archive", headers=headers)
assert res.status_code == 200
assert res.json() == {"status": "archived"}
res = await cl.post("/api/v1/entry/1/unarchive", headers=headers)
res = cl.post("/api/v1/entry/1/unarchive", headers=headers)
assert res.status_code == 200
assert res.json() == {"status": "active"}
@pytest.mark.anyio
async def test_update_config(client):
def test_update_config(client):
cl, token = client
called = {}
def set_timeout(val):
called["val"] = val
api.app.state.pm.config_manager.set_inactivity_timeout = set_timeout
api._pm.config_manager.set_inactivity_timeout = set_timeout
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
res = await cl.put(
res = cl.put(
"/api/v1/config/inactivity_timeout",
json={"value": 42},
headers=headers,
@@ -181,15 +162,13 @@ async def test_update_config(client):
assert res.headers.get("access-control-allow-origin") == "http://example.com"
@pytest.mark.anyio
async def test_update_config_quick_unlock(client):
def test_update_config_quick_unlock(client):
cl, token = client
called = {}
api.app.state.pm.config_manager.set_quick_unlock = lambda v: called.setdefault(
"val", v
)
api._pm.config_manager.set_quick_unlock = lambda v: called.setdefault("val", v)
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
res = await cl.put(
res = cl.put(
"/api/v1/config/quick_unlock",
json={"value": True},
headers=headers,
@@ -199,13 +178,13 @@ async def test_update_config_quick_unlock(client):
assert called.get("val") is True
@pytest.mark.anyio
async def test_change_password_route(client):
def test_change_password_route(client):
cl, token = client
called = {}
api.app.state.pm.change_password = lambda o, n: called.setdefault("called", (o, n))
api._pm.change_password = lambda o, n: called.setdefault("called", (o, n))
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
res = await cl.post(
res = cl.post(
"/api/v1/change-password",
headers=headers,
json={"old": "old", "new": "new"},
@@ -216,11 +195,10 @@ async def test_change_password_route(client):
assert res.headers.get("access-control-allow-origin") == "http://example.com"
@pytest.mark.anyio
async def test_update_config_unknown_key(client):
def test_update_config_unknown_key(client):
cl, token = client
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
res = await cl.put(
res = cl.put(
"/api/v1/config/bogus",
json={"value": 1},
headers=headers,
@@ -228,8 +206,7 @@ async def test_update_config_unknown_key(client):
assert res.status_code == 400
@pytest.mark.anyio
async def test_shutdown(client, monkeypatch):
def test_shutdown(client, monkeypatch):
cl, token = client
calls = {}
@@ -245,7 +222,7 @@ async def test_shutdown(client, monkeypatch):
"Authorization": f"Bearer {token}",
"Origin": "http://example.com",
}
res = await cl.post("/api/v1/shutdown", headers=headers)
res = cl.post("/api/v1/shutdown", headers=headers)
assert res.status_code == 200
assert res.json() == {"status": "shutting down"}
assert calls["func"] is sys.exit
@@ -253,7 +230,6 @@ async def test_shutdown(client, monkeypatch):
assert res.headers.get("access-control-allow-origin") == "http://example.com"
@pytest.mark.anyio
@pytest.mark.parametrize(
"method,path",
[
@@ -271,11 +247,11 @@ async def test_shutdown(client, monkeypatch):
("post", "/api/v1/vault/lock"),
],
)
async def test_invalid_token_other_endpoints(client, method, path):
def test_invalid_token_other_endpoints(client, method, path):
cl, _token = client
req = getattr(cl, method)
kwargs = {"headers": {"Authorization": "Bearer bad"}}
if method in {"post", "put"}:
kwargs["json"] = {}
res = await req(path, **kwargs)
res = req(path, **kwargs)
assert res.status_code == 401

View File

@@ -1,7 +1,5 @@
from types import SimpleNamespace
from pathlib import Path
import os
import base64
import pytest
from seedpass import api
@@ -9,12 +7,10 @@ from test_api import client
from helpers import dummy_nostr_client
import string
from seedpass.core.password_generation import PasswordGenerator, PasswordPolicy
from seedpass.core.encryption import EncryptionManager
from nostr.client import NostrClient, DEFAULT_RELAYS
@pytest.mark.anyio
async def test_create_and_modify_totp_entry(client):
def test_create_and_modify_totp_entry(client):
cl, token = client
calls = {}
@@ -25,13 +21,13 @@ async def test_create_and_modify_totp_entry(client):
def modify(idx, **kwargs):
calls["modify"] = (idx, kwargs)
api.app.state.pm.entry_manager.add_totp = add_totp
api.app.state.pm.entry_manager.modify_entry = modify
api.app.state.pm.entry_manager.get_next_index = lambda: 5
api.app.state.pm.parent_seed = "seed"
api._pm.entry_manager.add_totp = add_totp
api._pm.entry_manager.modify_entry = modify
api._pm.entry_manager.get_next_index = lambda: 5
api._pm.parent_seed = "seed"
headers = {"Authorization": f"Bearer {token}"}
res = await cl.post(
res = cl.post(
"/api/v1/entry",
json={
"type": "totp",
@@ -53,10 +49,9 @@ async def test_create_and_modify_totp_entry(client):
"digits": 8,
"notes": "n",
"archived": False,
"deterministic": False,
}
res = await cl.put(
res = cl.put(
"/api/v1/entry/5",
json={"period": 90, "digits": 6},
headers=headers,
@@ -67,8 +62,7 @@ async def test_create_and_modify_totp_entry(client):
assert calls["modify"][1]["digits"] == 6
@pytest.mark.anyio
async def test_create_and_modify_ssh_entry(client):
def test_create_and_modify_ssh_entry(client):
cl, token = client
calls = {}
@@ -79,12 +73,12 @@ async def test_create_and_modify_ssh_entry(client):
def modify(idx, **kwargs):
calls["modify"] = (idx, kwargs)
api.app.state.pm.entry_manager.add_ssh_key = add_ssh
api.app.state.pm.entry_manager.modify_entry = modify
api.app.state.pm.parent_seed = "seed"
api._pm.entry_manager.add_ssh_key = add_ssh
api._pm.entry_manager.modify_entry = modify
api._pm.parent_seed = "seed"
headers = {"Authorization": f"Bearer {token}"}
res = await cl.post(
res = cl.post(
"/api/v1/entry",
json={"type": "ssh", "label": "S", "index": 2, "notes": "n"},
headers=headers,
@@ -93,7 +87,7 @@ async def test_create_and_modify_ssh_entry(client):
assert res.json() == {"id": 2}
assert calls["create"] == {"index": 2, "notes": "n", "archived": False}
res = await cl.put(
res = cl.put(
"/api/v1/entry/2",
json={"notes": "x"},
headers=headers,
@@ -103,31 +97,29 @@ async def test_create_and_modify_ssh_entry(client):
assert calls["modify"][1]["notes"] == "x"
@pytest.mark.anyio
async def test_update_entry_error(client):
def test_update_entry_error(client):
cl, token = client
def modify(*a, **k):
raise ValueError("nope")
api.app.state.pm.entry_manager.modify_entry = modify
api._pm.entry_manager.modify_entry = modify
headers = {"Authorization": f"Bearer {token}"}
res = await cl.put("/api/v1/entry/1", json={"username": "x"}, headers=headers)
res = cl.put("/api/v1/entry/1", json={"username": "x"}, headers=headers)
assert res.status_code == 400
assert res.json() == {"detail": "nope"}
@pytest.mark.anyio
async def test_update_config_secret_mode(client):
def test_update_config_secret_mode(client):
cl, token = client
called = {}
def set_secret(val):
called["val"] = val
api.app.state.pm.config_manager.set_secret_mode_enabled = set_secret
api._pm.config_manager.set_secret_mode_enabled = set_secret
headers = {"Authorization": f"Bearer {token}"}
res = await cl.put(
res = cl.put(
"/api/v1/config/secret_mode_enabled",
json={"value": True},
headers=headers,
@@ -137,28 +129,24 @@ async def test_update_config_secret_mode(client):
assert called["val"] is True
@pytest.mark.anyio
async def test_totp_export_endpoint(client):
def test_totp_export_endpoint(client):
cl, token = client
api.app.state.pm.entry_manager.export_totp_entries = lambda seed: {"entries": ["x"]}
api.app.state.pm.parent_seed = "seed"
headers = {"Authorization": f"Bearer {token}", "X-SeedPass-Password": "pw"}
res = await cl.get("/api/v1/totp/export", headers=headers)
api._pm.entry_manager.export_totp_entries = lambda seed: {"entries": ["x"]}
api._pm.parent_seed = "seed"
headers = {"Authorization": f"Bearer {token}"}
res = cl.get("/api/v1/totp/export", headers=headers)
assert res.status_code == 200
assert res.json() == {"entries": ["x"]}
@pytest.mark.anyio
async def test_totp_codes_endpoint(client):
def test_totp_codes_endpoint(client):
cl, token = client
api.app.state.pm.entry_manager.list_entries = lambda **kw: [
(0, "Email", None, None, False)
]
api.app.state.pm.entry_manager.get_totp_code = lambda i, s: "123456"
api.app.state.pm.entry_manager.get_totp_time_remaining = lambda i: 30
api.app.state.pm.parent_seed = "seed"
headers = {"Authorization": f"Bearer {token}", "X-SeedPass-Password": "pw"}
res = await cl.get("/api/v1/totp", headers=headers)
api._pm.entry_manager.list_entries = lambda **kw: [(0, "Email", None, None, False)]
api._pm.entry_manager.get_totp_code = lambda i, s: "123456"
api._pm.entry_manager.get_totp_time_remaining = lambda i: 30
api._pm.parent_seed = "seed"
headers = {"Authorization": f"Bearer {token}"}
res = cl.get("/api/v1/totp", headers=headers)
assert res.status_code == 200
assert res.json() == {
"codes": [
@@ -167,39 +155,49 @@ async def test_totp_codes_endpoint(client):
}
@pytest.mark.anyio
async def test_parent_seed_endpoint_removed(client):
def test_parent_seed_endpoint(client, tmp_path):
cl, token = client
res = await cl.get(
"/api/v1/parent-seed", headers={"Authorization": f"Bearer {token}"}
api._pm.parent_seed = "seed"
called = {}
api._pm.encryption_manager = SimpleNamespace(
encrypt_and_save_file=lambda data, path: called.setdefault("path", path)
)
assert res.status_code == 404
headers = {"Authorization": f"Bearer {token}"}
res = cl.get("/api/v1/parent-seed", headers=headers)
assert res.status_code == 200
assert res.json() == {"seed": "seed"}
out = tmp_path / "bk.enc"
res = cl.get("/api/v1/parent-seed", params={"file": str(out)}, headers=headers)
assert res.status_code == 200
assert res.json() == {"status": "saved", "path": str(out)}
assert called["path"] == out
@pytest.mark.anyio
async def test_fingerprint_endpoints(client):
def test_fingerprint_endpoints(client):
cl, token = client
calls = {}
api.app.state.pm.add_new_fingerprint = lambda: calls.setdefault("add", True)
api.app.state.pm.fingerprint_manager.remove_fingerprint = (
lambda fp: calls.setdefault("remove", fp)
api._pm.add_new_fingerprint = lambda: calls.setdefault("add", True)
api._pm.fingerprint_manager.remove_fingerprint = lambda fp: calls.setdefault(
"remove", fp
)
api.app.state.pm.select_fingerprint = lambda fp: calls.setdefault("select", fp)
api._pm.select_fingerprint = lambda fp: calls.setdefault("select", fp)
headers = {"Authorization": f"Bearer {token}"}
res = await cl.post("/api/v1/fingerprint", headers=headers)
res = cl.post("/api/v1/fingerprint", headers=headers)
assert res.status_code == 200
assert res.json() == {"status": "ok"}
assert calls.get("add") is True
res = await cl.delete("/api/v1/fingerprint/abc", headers=headers)
res = cl.delete("/api/v1/fingerprint/abc", headers=headers)
assert res.status_code == 200
assert res.json() == {"status": "deleted"}
assert calls.get("remove") == "abc"
res = await cl.post(
res = cl.post(
"/api/v1/fingerprint/select",
json={"fingerprint": "xyz"},
headers=headers,
@@ -209,47 +207,40 @@ async def test_fingerprint_endpoints(client):
assert calls.get("select") == "xyz"
@pytest.mark.anyio
async def test_checksum_endpoints(client):
def test_checksum_endpoints(client):
cl, token = client
calls = {}
api.app.state.pm.handle_verify_checksum = lambda: calls.setdefault("verify", True)
api.app.state.pm.handle_update_script_checksum = lambda: calls.setdefault(
"update", True
)
api._pm.handle_verify_checksum = lambda: calls.setdefault("verify", True)
api._pm.handle_update_script_checksum = lambda: calls.setdefault("update", True)
headers = {"Authorization": f"Bearer {token}"}
res = await cl.post("/api/v1/checksum/verify", headers=headers)
res = cl.post("/api/v1/checksum/verify", headers=headers)
assert res.status_code == 200
assert res.json() == {"status": "ok"}
assert calls.get("verify") is True
res = await cl.post("/api/v1/checksum/update", headers=headers)
res = cl.post("/api/v1/checksum/update", headers=headers)
assert res.status_code == 200
assert res.json() == {"status": "ok"}
assert calls.get("update") is True
@pytest.mark.anyio
async def test_vault_import_via_path(client, tmp_path):
def test_vault_import_via_path(client, tmp_path):
cl, token = client
called = {}
def import_db(path):
called["path"] = path
api.app.state.pm.handle_import_database = import_db
api.app.state.pm.sync_vault = lambda: called.setdefault("sync", True)
api.app.state.pm.encryption_manager = SimpleNamespace(
resolve_relative_path=lambda p: p
)
file_path = tmp_path / "b.json.enc"
api._pm.handle_import_database = import_db
api._pm.sync_vault = lambda: called.setdefault("sync", True)
file_path = tmp_path / "b.json"
file_path.write_text("{}")
headers = {"Authorization": f"Bearer {token}"}
res = await cl.post(
res = cl.post(
"/api/v1/vault/import",
json={"path": str(file_path)},
headers=headers,
@@ -260,22 +251,21 @@ async def test_vault_import_via_path(client, tmp_path):
assert called.get("sync") is True
@pytest.mark.anyio
async def test_vault_import_via_upload(client, tmp_path):
def test_vault_import_via_upload(client, tmp_path):
cl, token = client
called = {}
def import_db(path):
called["path"] = path
api.app.state.pm.handle_import_database = import_db
api.app.state.pm.sync_vault = lambda: called.setdefault("sync", True)
api._pm.handle_import_database = import_db
api._pm.sync_vault = lambda: called.setdefault("sync", True)
file_path = tmp_path / "c.json"
file_path.write_text("{}")
headers = {"Authorization": f"Bearer {token}"}
with open(file_path, "rb") as fh:
res = await cl.post(
res = cl.post(
"/api/v1/vault/import",
files={"file": ("c.json", fh.read())},
headers=headers,
@@ -286,68 +276,29 @@ async def test_vault_import_via_upload(client, tmp_path):
assert called.get("sync") is True
@pytest.mark.anyio
async def test_vault_import_invalid_extension(client):
cl, token = client
api.app.state.pm.handle_import_database = lambda path: None
api.app.state.pm.sync_vault = lambda: None
api.app.state.pm.encryption_manager = SimpleNamespace(
resolve_relative_path=lambda p: p
)
headers = {"Authorization": f"Bearer {token}"}
res = await cl.post(
"/api/v1/vault/import",
json={"path": "bad.txt"},
headers=headers,
)
assert res.status_code == 400
@pytest.mark.anyio
async def test_vault_import_path_traversal_blocked(client, tmp_path):
cl, token = client
key = base64.urlsafe_b64encode(os.urandom(32))
api.app.state.pm.encryption_manager = EncryptionManager(key, tmp_path)
api.app.state.pm.handle_import_database = lambda path: None
api.app.state.pm.sync_vault = lambda: None
headers = {"Authorization": f"Bearer {token}"}
res = await cl.post(
"/api/v1/vault/import",
json={"path": "../evil.json.enc"},
headers=headers,
)
assert res.status_code == 400
@pytest.mark.anyio
async def test_vault_lock_endpoint(client):
def test_vault_lock_endpoint(client):
cl, token = client
called = {}
def lock():
called["locked"] = True
api.app.state.pm.locked = True
api._pm.locked = True
api.app.state.pm.lock_vault = lock
api.app.state.pm.locked = False
api._pm.lock_vault = lock
api._pm.locked = False
headers = {"Authorization": f"Bearer {token}"}
res = await cl.post("/api/v1/vault/lock", headers=headers)
res = cl.post("/api/v1/vault/lock", headers=headers)
assert res.status_code == 200
assert res.json() == {"status": "locked"}
assert called.get("locked") is True
assert api.app.state.pm.locked is True
api.app.state.pm.unlock_vault = lambda pw: setattr(
api.app.state.pm, "locked", False
)
api.app.state.pm.unlock_vault("pw")
assert api.app.state.pm.locked is False
assert api._pm.locked is True
api._pm.unlock_vault = lambda pw: setattr(api._pm, "locked", False)
api._pm.unlock_vault("pw")
assert api._pm.locked is False
@pytest.mark.anyio
async def test_secret_mode_endpoint(client):
def test_secret_mode_endpoint(client):
cl, token = client
called = {}
@@ -357,11 +308,11 @@ async def test_secret_mode_endpoint(client):
def set_delay(val):
called.setdefault("delay", val)
api.app.state.pm.config_manager.set_secret_mode_enabled = set_secret
api.app.state.pm.config_manager.set_clipboard_clear_delay = set_delay
api._pm.config_manager.set_secret_mode_enabled = set_secret
api._pm.config_manager.set_clipboard_clear_delay = set_delay
headers = {"Authorization": f"Bearer {token}"}
res = await cl.post(
res = cl.post(
"/api/v1/secret-mode",
json={"enabled": True, "delay": 12},
headers=headers,
@@ -372,79 +323,40 @@ async def test_secret_mode_endpoint(client):
assert called["delay"] == 12
@pytest.mark.anyio
async def test_vault_export_endpoint(client, tmp_path):
def test_vault_export_endpoint(client, tmp_path):
cl, token = client
out = tmp_path / "out.json"
out.write_text("data")
api.app.state.pm.handle_export_database = lambda *a, **k: out
api._pm.handle_export_database = lambda: out
headers = {
"Authorization": f"Bearer {token}",
"X-SeedPass-Password": "pw",
}
res = await cl.post("/api/v1/vault/export", headers=headers)
headers = {"Authorization": f"Bearer {token}"}
res = cl.post("/api/v1/vault/export", headers=headers)
assert res.status_code == 200
assert res.content == b"data"
res = await cl.post(
"/api/v1/vault/export", headers={"Authorization": f"Bearer {token}"}
)
assert res.status_code == 401
@pytest.mark.anyio
async def test_backup_parent_seed_endpoint(client, tmp_path):
def test_backup_parent_seed_endpoint(client, tmp_path):
cl, token = client
api.app.state.pm.parent_seed = "seed"
called = {}
api.app.state.pm.encryption_manager = SimpleNamespace(
encrypt_and_save_file=lambda data, path: called.setdefault("path", path),
resolve_relative_path=lambda p: p,
)
path = Path("seed.enc")
headers = {
"Authorization": f"Bearer {token}",
"X-SeedPass-Password": "pw",
}
res = await cl.post(
"/api/v1/vault/backup-parent-seed",
json={"path": str(path), "confirm": True},
headers=headers,
)
assert res.status_code == 200
assert res.json() == {"status": "saved", "path": str(path)}
assert called["path"] == path
res = await cl.post(
def backup(path=None):
called["path"] = path
api._pm.handle_backup_reveal_parent_seed = backup
path = tmp_path / "seed.enc"
headers = {"Authorization": f"Bearer {token}"}
res = cl.post(
"/api/v1/vault/backup-parent-seed",
json={"path": str(path)},
headers=headers,
)
assert res.status_code == 400
assert res.status_code == 200
assert res.json() == {"status": "ok"}
assert called["path"] == path
@pytest.mark.anyio
async def test_backup_parent_seed_path_traversal_blocked(client, tmp_path):
cl, token = client
api.app.state.pm.parent_seed = "seed"
key = base64.urlsafe_b64encode(os.urandom(32))
api.app.state.pm.encryption_manager = EncryptionManager(key, tmp_path)
headers = {
"Authorization": f"Bearer {token}",
"X-SeedPass-Password": "pw",
}
res = await cl.post(
"/api/v1/vault/backup-parent-seed",
json={"path": "../evil.enc", "confirm": True},
headers=headers,
)
assert res.status_code == 400
@pytest.mark.anyio
async def test_relay_management_endpoints(client, dummy_nostr_client, monkeypatch):
def test_relay_management_endpoints(client, dummy_nostr_client, monkeypatch):
cl, token = client
nostr_client, _ = dummy_nostr_client
relays = ["wss://a", "wss://b"]
@@ -457,8 +369,8 @@ async def test_relay_management_endpoints(client, dummy_nostr_client, monkeypatc
def set_relays(new, require_pin=False):
called["set"] = new
api.app.state.pm.config_manager.load_config = load_config
api.app.state.pm.config_manager.set_relays = set_relays
api._pm.config_manager.load_config = load_config
api._pm.config_manager.set_relays = set_relays
monkeypatch.setattr(
NostrClient,
"initialize_client_pool",
@@ -467,34 +379,33 @@ async def test_relay_management_endpoints(client, dummy_nostr_client, monkeypatc
monkeypatch.setattr(
nostr_client, "close_client_pool", lambda: called.setdefault("close", True)
)
api.app.state.pm.nostr_client = nostr_client
api.app.state.pm.nostr_client.relays = relays.copy()
api._pm.nostr_client = nostr_client
api._pm.nostr_client.relays = relays.copy()
headers = {"Authorization": f"Bearer {token}"}
res = await cl.get("/api/v1/relays", headers=headers)
res = cl.get("/api/v1/relays", headers=headers)
assert res.status_code == 200
assert res.json() == {"relays": relays}
res = await cl.post("/api/v1/relays", json={"url": "wss://c"}, headers=headers)
res = cl.post("/api/v1/relays", json={"url": "wss://c"}, headers=headers)
assert res.status_code == 200
assert called["set"] == ["wss://a", "wss://b", "wss://c"]
api.app.state.pm.config_manager.load_config = lambda require_pin=False: {
api._pm.config_manager.load_config = lambda require_pin=False: {
"relays": ["wss://a", "wss://b", "wss://c"]
}
res = await cl.delete("/api/v1/relays/2", headers=headers)
res = cl.delete("/api/v1/relays/2", headers=headers)
assert res.status_code == 200
assert called["set"] == ["wss://a", "wss://c"]
res = await cl.post("/api/v1/relays/reset", headers=headers)
res = cl.post("/api/v1/relays/reset", headers=headers)
assert res.status_code == 200
assert called.get("init") is True
assert api.app.state.pm.nostr_client.relays == list(DEFAULT_RELAYS)
assert api._pm.nostr_client.relays == list(DEFAULT_RELAYS)
@pytest.mark.anyio
async def test_generate_password_no_special_chars(client):
def test_generate_password_no_special_chars(client):
cl, token = client
class DummyEnc:
@@ -502,18 +413,14 @@ async def test_generate_password_no_special_chars(client):
return b"\x00" * 32
class DummyBIP85:
def derive_entropy(
self, index: int, entropy_bytes: int, app_no: int = 32
) -> bytes:
return bytes(range(entropy_bytes))
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
return bytes(range(bytes_len))
api.app.state.pm.password_generator = PasswordGenerator(
DummyEnc(), "seed", DummyBIP85()
)
api.app.state.pm.parent_seed = "seed"
api._pm.password_generator = PasswordGenerator(DummyEnc(), "seed", DummyBIP85())
api._pm.parent_seed = "seed"
headers = {"Authorization": f"Bearer {token}"}
res = await cl.post(
res = cl.post(
"/api/v1/password",
json={"length": 16, "include_special_chars": False},
headers=headers,
@@ -523,8 +430,7 @@ async def test_generate_password_no_special_chars(client):
assert not any(c in string.punctuation for c in pw)
@pytest.mark.anyio
async def test_generate_password_allowed_chars(client):
def test_generate_password_allowed_chars(client):
cl, token = client
class DummyEnc:
@@ -532,19 +438,15 @@ async def test_generate_password_allowed_chars(client):
return b"\x00" * 32
class DummyBIP85:
def derive_entropy(
self, index: int, entropy_bytes: int, app_no: int = 32
) -> bytes:
return bytes((index + i) % 256 for i in range(entropy_bytes))
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
return bytes((index + i) % 256 for i in range(bytes_len))
api.app.state.pm.password_generator = PasswordGenerator(
DummyEnc(), "seed", DummyBIP85()
)
api.app.state.pm.parent_seed = "seed"
api._pm.password_generator = PasswordGenerator(DummyEnc(), "seed", DummyBIP85())
api._pm.parent_seed = "seed"
headers = {"Authorization": f"Bearer {token}"}
allowed = "@$"
res = await cl.post(
res = cl.post(
"/api/v1/password",
json={"length": 16, "allowed_special_chars": allowed},
headers=headers,

View File

@@ -1,59 +1,45 @@
from test_api import client
from types import SimpleNamespace
import queue
import pytest
import seedpass.api as api
@pytest.mark.anyio
async def test_notifications_endpoint(client):
def test_notifications_endpoint(client):
cl, token = client
api.app.state.pm.notifications = queue.Queue()
api.app.state.pm.notifications.put(SimpleNamespace(message="m1", level="INFO"))
api.app.state.pm.notifications.put(SimpleNamespace(message="m2", level="WARNING"))
res = await cl.get(
"/api/v1/notifications", headers={"Authorization": f"Bearer {token}"}
)
api._pm.notifications = queue.Queue()
api._pm.notifications.put(SimpleNamespace(message="m1", level="INFO"))
api._pm.notifications.put(SimpleNamespace(message="m2", level="WARNING"))
res = cl.get("/api/v1/notifications", headers={"Authorization": f"Bearer {token}"})
assert res.status_code == 200
assert res.json() == [
{"level": "INFO", "message": "m1"},
{"level": "WARNING", "message": "m2"},
]
assert api.app.state.pm.notifications.empty()
assert api._pm.notifications.empty()
@pytest.mark.anyio
async def test_notifications_endpoint_clears_queue(client):
def test_notifications_endpoint_clears_queue(client):
cl, token = client
api.app.state.pm.notifications = queue.Queue()
api.app.state.pm.notifications.put(SimpleNamespace(message="hi", level="INFO"))
res = await cl.get(
"/api/v1/notifications", headers={"Authorization": f"Bearer {token}"}
)
api._pm.notifications = queue.Queue()
api._pm.notifications.put(SimpleNamespace(message="hi", level="INFO"))
res = cl.get("/api/v1/notifications", headers={"Authorization": f"Bearer {token}"})
assert res.status_code == 200
assert res.json() == [{"level": "INFO", "message": "hi"}]
assert api.app.state.pm.notifications.empty()
res = await cl.get(
"/api/v1/notifications", headers={"Authorization": f"Bearer {token}"}
)
assert api._pm.notifications.empty()
res = cl.get("/api/v1/notifications", headers={"Authorization": f"Bearer {token}"})
assert res.json() == []
@pytest.mark.anyio
async def test_notifications_endpoint_does_not_clear_current(client):
def test_notifications_endpoint_does_not_clear_current(client):
cl, token = client
api.app.state.pm.notifications = queue.Queue()
api._pm.notifications = queue.Queue()
msg = SimpleNamespace(message="keep", level="INFO")
api.app.state.pm.notifications.put(msg)
api.app.state.pm._current_notification = msg
api.app.state.pm.get_current_notification = (
lambda: api.app.state.pm._current_notification
)
api._pm.notifications.put(msg)
api._pm._current_notification = msg
api._pm.get_current_notification = lambda: api._pm._current_notification
res = await cl.get(
"/api/v1/notifications", headers={"Authorization": f"Bearer {token}"}
)
res = cl.get("/api/v1/notifications", headers={"Authorization": f"Bearer {token}"})
assert res.status_code == 200
assert res.json() == [{"level": "INFO", "message": "keep"}]
assert api.app.state.pm.notifications.empty()
assert api.app.state.pm.get_current_notification() is msg
assert api._pm.notifications.empty()
assert api._pm.get_current_notification() is msg

View File

@@ -1,14 +1,13 @@
from test_api import client
import pytest
@pytest.mark.anyio
async def test_profile_stats_endpoint(client):
def test_profile_stats_endpoint(client):
cl, token = client
stats = {"total_entries": 1}
# monkeypatch: set _pm.get_profile_stats after the client fixture has started
import seedpass.api as api
api.app.state.pm.get_profile_stats = lambda: stats
res = await cl.get("/api/v1/stats", headers={"Authorization": f"Bearer {token}"})
api._pm.get_profile_stats = lambda: stats
res = cl.get("/api/v1/stats", headers={"Authorization": f"Bearer {token}"})
assert res.status_code == 200
assert res.json() == stats

View File

@@ -1,47 +0,0 @@
import importlib
from pathlib import Path
from types import SimpleNamespace
import importlib
import pytest
from httpx import ASGITransport, AsyncClient
import sys
sys.path.append(str(Path(__file__).resolve().parents[1]))
@pytest.mark.anyio
async def test_rate_limit_exceeded(monkeypatch):
monkeypatch.setenv("SEEDPASS_RATE_LIMIT", "2")
monkeypatch.setenv("SEEDPASS_RATE_WINDOW", "60")
import seedpass.api as api
importlib.reload(api)
dummy = SimpleNamespace(
entry_manager=SimpleNamespace(
search_entries=lambda q: [
(1, "Site", "user", "url", False, SimpleNamespace(value="password"))
]
),
config_manager=SimpleNamespace(load_config=lambda require_pin=False: {}),
fingerprint_manager=SimpleNamespace(list_fingerprints=lambda: []),
nostr_client=SimpleNamespace(
key_manager=SimpleNamespace(get_npub=lambda: "np")
),
verify_password=lambda pw: True,
)
monkeypatch.setattr(api, "PasswordManager", lambda: dummy)
token = api.start_server()
transport = ASGITransport(app=api.app)
async with AsyncClient(transport=transport, base_url="http://test") as client:
headers = {"Authorization": f"Bearer {token}"}
for _ in range(2):
res = await client.get(
"/api/v1/entry", params={"query": "s"}, headers=headers
)
assert res.status_code == 200
res = await client.get("/api/v1/entry", params={"query": "s"}, headers=headers)
assert res.status_code == 429

View File

@@ -1,29 +0,0 @@
import logging
from types import SimpleNamespace
from seedpass import api
def test_reload_relays_logs_errors(caplog):
def close():
raise RuntimeError("close fail")
def init():
raise OSError("init fail")
pm = SimpleNamespace(
nostr_client=SimpleNamespace(
close_client_pool=close,
initialize_client_pool=init,
relays=[],
)
)
request = SimpleNamespace(app=SimpleNamespace(state=SimpleNamespace(pm=pm)))
with caplog.at_level(logging.WARNING):
api._reload_relays(request, ["ws://relay"])
assert "Failed to close NostrClient pool" in caplog.text
assert "close fail" in caplog.text
assert "Failed to initialize NostrClient with relays" in caplog.text
assert "init fail" in caplog.text

View File

@@ -1,30 +0,0 @@
import json
from multiprocessing import Process
from pathlib import Path
from utils.atomic_write import atomic_write
def _writer(path: Path, content: dict, loops: int) -> None:
for _ in range(loops):
atomic_write(path, lambda f: json.dump(content, f), mode="w")
def test_atomic_write_concurrent(tmp_path: Path) -> None:
"""Concurrent writers should not leave partial files."""
file_path = tmp_path / "data.json"
contents = [{"proc": i} for i in range(5)]
procs = [
Process(target=_writer, args=(file_path, content, 50)) for content in contents
]
for p in procs:
p.start()
for p in procs:
p.join()
final_text = file_path.read_text()
final_obj = json.loads(final_text)
assert final_obj in contents

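Illustrative aside: the write-to-temp-then-rename pattern the concurrency test above relies on. This is an assumed equivalent of utils.atomic_write, not its actual implementation.

import json
import os
import tempfile
from pathlib import Path

def atomic_write_json(path: Path, obj: dict) -> None:
    """Write obj as JSON so concurrent readers never see a partial file."""
    fd, tmp = tempfile.mkstemp(dir=path.parent, suffix=".tmp")
    try:
        with os.fdopen(fd, "w") as fh:
            json.dump(obj, fh)
            fh.flush()
            os.fsync(fh.fileno())
        os.replace(tmp, path)  # atomic rename on POSIX and Windows
    except BaseException:
        if os.path.exists(tmp):
            os.unlink(tmp)
        raise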
View File

@@ -1,85 +0,0 @@
import json
import hashlib
import hmac
import queue
from pathlib import Path
from types import SimpleNamespace
import importlib
import pytest
from seedpass.core.manager import PasswordManager, AuditLogger
import seedpass.core.manager as manager_module
def test_audit_logger_records_events(monkeypatch, tmp_path):
monkeypatch.setattr(Path, "home", lambda: tmp_path)
pm = PasswordManager.__new__(PasswordManager)
pm.fingerprint_dir = tmp_path
pm.current_fingerprint = "user123"
pm.profile_stack = []
pm.setup_encryption_manager = lambda *a, **k: None
pm.initialize_bip85 = lambda: None
pm.initialize_managers = lambda: None
pm.update_activity = lambda: None
pm.verify_password = lambda pw: True
pm.notifications = queue.Queue()
pm.parent_seed = "seed phrase"
pm.config_manager = SimpleNamespace(get_quick_unlock=lambda: True)
manager_module.clear_header_with_notification = lambda *a, **k: None
pm.unlock_vault(password="pw")
dest = tmp_path / "db.json.enc"
monkeypatch.setattr(manager_module, "export_backup", lambda *a, **k: dest)
pm.vault = object()
pm.backup_manager = object()
monkeypatch.setattr("seedpass.core.manager.confirm_action", lambda *_a, **_k: True)
pm.handle_export_database(dest)
confirms = iter([True, False])
monkeypatch.setattr(
"seedpass.core.manager.confirm_action", lambda *_a, **_k: next(confirms)
)
pm.encryption_manager = SimpleNamespace(encrypt_and_save_file=lambda *a, **k: None)
pm.handle_backup_reveal_parent_seed(password="pw")
log_path = tmp_path / ".seedpass" / "audit.log"
lines = [json.loads(l) for l in log_path.read_text().splitlines()]
events = [e["event"] for e in lines]
assert "quick_unlock" in events
assert "backup_export" in events
assert "seed_reveal" in events
def _verify_chain(path: Path, key: bytes) -> bool:
prev = "0" * 64
for line in path.read_text().splitlines():
data = json.loads(line)
sig = data.pop("sig")
payload = json.dumps(data, sort_keys=True, separators=(",", ":"))
expected = hmac.new(
key, f"{prev}{payload}".encode(), hashlib.sha256
).hexdigest()
if sig != expected:
return False
prev = sig
return True
def test_audit_log_tamper_evident(monkeypatch, tmp_path):
monkeypatch.setattr(Path, "home", lambda: tmp_path)
key = hashlib.sha256(b"seed").digest()
logger = AuditLogger(key)
logger.log("one", {})
logger.log("two", {})
log_path = tmp_path / ".seedpass" / "audit.log"
assert _verify_chain(log_path, key)
lines = log_path.read_text().splitlines()
tampered = json.loads(lines[0])
tampered["event"] = "evil"
lines[0] = json.dumps(tampered)
log_path.write_text("\n".join(lines) + "\n")
assert not _verify_chain(log_path, key)

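Illustrative counterpart to _verify_chain above: appending a record to a hash-chained log in which each signature covers the previous signature plus the canonical payload, so altering any earlier line breaks every later one. This mirrors the behaviour the tests assume of AuditLogger, not its actual code.

import hashlib
import hmac
import json

def append_event(lines: list[str], key: bytes, event: str, data: dict) -> None:
    """Append a signed record; 'lines' holds the existing log as JSON strings."""
    prev = json.loads(lines[-1])["sig"] if lines else "0" * 64
    record = {"event": event, **data}
    payload = json.dumps(record, sort_keys=True, separators=(",", ":"))
    record["sig"] = hmac.new(key, f"{prev}{payload}".encode(), hashlib.sha256).hexdigest()
    lines.append(json.dumps(record))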
View File

@@ -15,7 +15,6 @@ def test_auto_sync_triggers_post(monkeypatch):
is_dirty=True,
last_update=time.time() - 0.2,
last_activity=time.time(),
current_fingerprint="fp",
nostr_client=SimpleNamespace(close_client_pool=lambda: None),
handle_add_password=lambda: None,
handle_retrieve_entry=lambda: None,

View File

@@ -1,56 +0,0 @@
import logging
import queue
import seedpass.core.manager as manager_module
def _make_pm():
pm = manager_module.PasswordManager.__new__(manager_module.PasswordManager)
pm.offline_mode = False
pm.notifications = queue.Queue()
pm.error_queue = queue.Queue()
pm.notify = lambda msg, level="INFO": pm.notifications.put(
manager_module.Notification(msg, level)
)
pm.nostr_client = object()
return pm
def test_start_background_sync_error(monkeypatch, caplog):
pm = _make_pm()
async def failing_sync(*_args, **_kwargs):
raise RuntimeError("boom")
monkeypatch.setattr(pm, "attempt_initial_sync_async", failing_sync)
monkeypatch.setattr(pm, "sync_index_from_nostr_async", failing_sync)
pm.start_background_sync()
pm._sync_task.join(timeout=1)
with caplog.at_level(logging.WARNING):
pm.poll_background_errors()
note = pm.notifications.get_nowait()
assert "boom" in note.message
assert "boom" in caplog.text
def test_start_background_relay_check_error(monkeypatch, caplog):
pm = _make_pm()
class DummyClient:
def check_relay_health(self, *_args, **_kwargs):
raise RuntimeError("relay boom")
pm.nostr_client = DummyClient()
pm.start_background_relay_check()
pm._relay_thread.join(timeout=1)
with caplog.at_level(logging.WARNING):
pm.poll_background_errors()
note = pm.notifications.get_nowait()
assert "relay boom" in note.message
assert "relay boom" in caplog.text

View File

@@ -20,7 +20,6 @@ def test_switch_fingerprint_triggers_bg_sync(monkeypatch, tmp_path):
pm.current_fingerprint = None
pm.encryption_manager = object()
pm.config_manager = SimpleNamespace(get_quick_unlock=lambda: False)
pm.nostr_account_idx = 0
monkeypatch.setattr("builtins.input", lambda *_a, **_k: "1")
monkeypatch.setattr(

View File

@@ -1,56 +0,0 @@
import main
from pathlib import Path


def test_cli_flag_restores_before_init(monkeypatch, tmp_path):
    calls = []
    backup = tmp_path / "bak.json"
    backup.write_text("{}")

    def fake_restore(path, fingerprint):
        calls.append(("restore", Path(path), fingerprint))

    class DummyPM:
        def __init__(self, fingerprint=None):
            calls.append(("init", fingerprint))
            self.secret_mode_enabled = True
            self.inactivity_timeout = 0

    monkeypatch.setattr(main, "restore_backup_index", fake_restore)
    monkeypatch.setattr(main, "PasswordManager", DummyPM)
    monkeypatch.setattr(main, "display_menu", lambda pm, **k: None)

    rc = main.main(["--fingerprint", "fp", "--restore-backup", str(backup)])

    assert rc == 0
    assert calls[0][0] == "restore"
    assert calls[1][0] == "init"
    assert calls[0][1] == backup
    assert calls[0][2] == "fp"


def test_menu_option_restores_before_init(monkeypatch, tmp_path):
    calls = []
    backup = tmp_path / "bak.json"
    backup.write_text("{}")

    def fake_restore(path, fingerprint):
        calls.append(("restore", Path(path), fingerprint))

    class DummyPM:
        def __init__(self, fingerprint=None):
            calls.append(("init", fingerprint))
            self.secret_mode_enabled = True
            self.inactivity_timeout = 0

    monkeypatch.setattr(main, "restore_backup_index", fake_restore)
    monkeypatch.setattr(main, "PasswordManager", DummyPM)
    monkeypatch.setattr(main, "display_menu", lambda pm, **k: None)

    inputs = iter(["2", str(backup)])
    monkeypatch.setattr("builtins.input", lambda _prompt="": next(inputs))

    rc = main.main(["--fingerprint", "fp"])

    assert rc == 0
    assert calls[0][0] == "restore"
    assert calls[1][0] == "init"
    assert calls[0][1] == backup
    assert calls[0][2] == "fp"
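
Both tests encode the same ordering contract: any requested backup restore runs before the manager is constructed. A minimal sketch under that assumption; restore_backup_index, PasswordManager and display_menu below are placeholders standing in for the real objects the tests monkeypatch on the main module.

import argparse


def restore_backup_index(path, fingerprint):
    """Placeholder for the real restore helper."""


class PasswordManager:
    def __init__(self, fingerprint=None):
        self.fingerprint = fingerprint


def display_menu(pm, **kwargs):
    """Placeholder for the interactive menu."""


def main(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument("--fingerprint")
    parser.add_argument("--restore-backup")
    args = parser.parse_args(argv)

    if args.restore_backup:
        restore_backup_index(args.restore_backup, args.fingerprint)  # restore first

    pm = PasswordManager(fingerprint=args.fingerprint)  # then initialize
    display_menu(pm)
    return 0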

View File

@@ -1,52 +0,0 @@
from local_bip85.bip85 import BIP85


class DummyChild:
    def PrivateKey(self):
        return self

    def Raw(self):
        return self

    def ToBytes(self):
        return b"\x00" * 32


class DummyCtx:
    def __init__(self):
        self.last_path = None

    def DerivePath(self, path: str):
        self.last_path = path
        return DummyChild()


def test_derivation_paths_for_entropy_lengths():
    bip85 = BIP85(b"\x00" * 64)
    ctx = DummyCtx()
    bip85.bip32_ctx = ctx

    vectors = [
        (16, 12),
        (24, 18),
        (32, 24),
    ]
    for entropy_bytes, word_count in vectors:
        bip85.derive_entropy(
            index=0,
            entropy_bytes=entropy_bytes,
            app_no=39,
            word_count=word_count,
        )
        assert ctx.last_path == f"m/83696968'/39'/0'/{word_count}'/0'"


def test_default_word_count_from_entropy_bytes():
    bip85 = BIP85(b"\x00" * 64)
    ctx = DummyCtx()
    bip85.bip32_ctx = ctx
    bip85.derive_entropy(index=5, entropy_bytes=20, app_no=39)
    assert ctx.last_path == "m/83696968'/39'/0'/20'/5'"

View File

@@ -1,21 +0,0 @@
import sys
from pathlib import Path

sys.path.append(str(Path(__file__).resolve().parents[1]))

from bip_utils import Bip39SeedGenerator
from local_bip85.bip85 import BIP85
from helpers import TEST_SEED

MASTER_XPRV = "xprv9s21ZrQH143K2LBWUUQRFXhucrQqBpKdRRxNVq2zBqsx8HVqFk2uYo8kmbaLLHRdqtQpUm98uKfu3vca1LqdGhUtyoFnCNkfmXRyPXLjbKb"


def test_init_with_seed_bytes():
    seed_bytes = Bip39SeedGenerator(TEST_SEED).Generate()
    bip85 = BIP85(seed_bytes)
    assert isinstance(bip85, BIP85)


def test_init_with_xprv():
    bip85 = BIP85(MASTER_XPRV)
    assert isinstance(bip85, BIP85)
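
One plausible constructor shape behind these two tests is to accept either raw seed bytes or an extended private key string and build a BIP32 context accordingly; the real local_bip85.BIP85 may do this differently.

from bip_utils import Bip32Slip10Secp256k1


class BIP85Sketch:
    def __init__(self, seed_or_xprv):
        # Raw seed bytes -> derive the BIP32 root; a string is treated as an xprv.
        if isinstance(seed_or_xprv, (bytes, bytearray)):
            self.bip32_ctx = Bip32Slip10Secp256k1.FromSeed(bytes(seed_or_xprv))
        else:
            self.bip32_ctx = Bip32Slip10Secp256k1.FromExtendedKey(seed_or_xprv)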

View File

@@ -1,44 +0,0 @@
from typer.testing import CliRunner

from seedpass.cli import app, entry as cli_entry
from seedpass.core.entry_types import EntryType
from utils.clipboard import ClipboardUnavailableError

runner = CliRunner()


def _stub_service(ctx, raise_error=True):
    class Service:
        def search_entries(self, query, kinds=None):
            return [(1, "label", None, None, False, EntryType.PASSWORD)]

        def retrieve_entry(self, idx):
            return {"type": EntryType.PASSWORD.value, "length": 12}

        def generate_password(self, length, index):
            if raise_error and not ctx.obj.get("no_clipboard"):
                raise ClipboardUnavailableError("missing")
            return "pwd"

    return Service()


def test_entry_get_handles_missing_clipboard(monkeypatch):
    monkeypatch.setattr(
        cli_entry, "_get_entry_service", lambda ctx: _stub_service(ctx, True)
    )
    result = runner.invoke(app, ["entry", "get", "label"], catch_exceptions=False)
    assert result.exit_code == 1
    assert "no-clipboard" in result.stderr.lower()


def test_entry_get_no_clipboard_flag(monkeypatch):
    monkeypatch.setattr(
        cli_entry, "_get_entry_service", lambda ctx: _stub_service(ctx, True)
    )
    result = runner.invoke(
        app, ["--no-clipboard", "entry", "get", "label"], catch_exceptions=False
    )
    assert result.exit_code == 0
    assert "pwd" in result.stdout

View File

@@ -3,7 +3,7 @@ from types import SimpleNamespace
from typer.testing import CliRunner
from seedpass.cli import app
from seedpass.cli import common as cli_common
from seedpass import cli
runner = CliRunner()
@@ -39,7 +39,7 @@ def test_config_set_variants(monkeypatch, key, value, method, expected):
        config_manager=SimpleNamespace(**{method: func}),
        select_fingerprint=lambda fp: None,
    )
    monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
    monkeypatch.setattr(cli, "PasswordManager", lambda: pm)

    result = runner.invoke(app, ["config", "set", key, value])

View File

@@ -5,7 +5,6 @@ from typer.testing import CliRunner
from seedpass import cli
from seedpass.cli import app
from seedpass.cli import common as cli_common
from seedpass.core.entry_types import EntryType
runner = CliRunner()
@@ -19,7 +18,7 @@ def test_cli_vault_unlock(monkeypatch):
        return 0.5

    pm = SimpleNamespace(unlock_vault=unlock_vault, select_fingerprint=lambda fp: None)
    monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
    monkeypatch.setattr(cli, "PasswordManager", lambda: pm)
    monkeypatch.setattr(cli.typer, "prompt", lambda *a, **k: "pw")

    result = runner.invoke(app, ["vault", "unlock"])
    assert result.exit_code == 0
@@ -50,7 +49,7 @@ def test_cli_entry_add_search_sync(monkeypatch):
        sync_vault=lambda: {"manifest_id": "m", "chunk_ids": [], "delta_ids": []},
        select_fingerprint=lambda fp: None,
    )
    monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
    monkeypatch.setattr(cli, "PasswordManager", lambda: pm)

    # entry add
    result = runner.invoke(app, ["entry", "add", "Label"])

Some files were not shown because too many files have changed in this diff.