mirror of
https://github.com/PR0M3TH3AN/SeedPass.git
synced 2025-09-09 07:48:57 +00:00
Compare commits: 90c304ff6e ... beta (70 commits)
Commits in this comparison:

- ca733be2e3
- e528bebae3
- e760bf2b25
- d106802a18
- f2648a8c1d
- d030cf9692
- bebbca8169
- 4d7e3d4b63
- 7b0344739f
- fde09bd1a0
- b307728c05
- 8ade9e3028
- c0a6187478
- d9f76ee668
- 40a75adcb7
- bd1588fba1
- d5e0d61db4
- d795ac9006
- ee3d9d8e9d
- 2b68df9428
- a2a663eed1
- ae59ede374
- 61b1aa6773
- 428efd02b4
- cfb861b60a
- ca533a3518
- a7da9b8971
- a0ae414765
- 45c112b26b
- 4df6ff639e
- 108fcfcb04
- 505cf1a950
- e701a1c1cb
- cb9a068e40
- c13742f3f3
- 6c8b1928b8
- b1b31eeb8a
- 492bfba3fb
- b33565e7f3
- 857b1ef0f9
- 7a039171a0
- dd513cf964
- 16de0a82c7
- d99af30d9f
- da37ec2e61
- 0315562d80
- e75e197270
- 619226d336
- 15df3f10a6
- b451097c65
- 9cacd1b13d
- b97d60778b
- bbb26ca55a
- d6e03d5e7a
- 26632c0e70
- 06ca51993a
- 1b6b0ab5c5
- 87999b1888
- 6928b4ddbf
- 73183d53a5
- c9ad16f150
- bd86bdbb3a
- 8d5374ef5b
- 468608a369
- 56e652089a
- c353c04472
- 2559920a14
- 57935bdfc1
- 55fdee522c
- af4eb72385
AGENTS.md (59 changed lines)

@@ -2,6 +2,60 @@

This project is written in **Python**. Follow these instructions when working with the code base.

## Installation Quickstart for AI Agents

### Prerequisites

Ensure the system has the required build tools and Python headers. Examples:

```bash
# Ubuntu/Debian
sudo apt update && sudo apt install -y \
  build-essential \
  libffi-dev \
  pkg-config \
  python3.11-dev \
  curl \
  git

# CentOS/RHEL
sudo yum install -y gcc gcc-c++ libffi-devel pkgconfig python3-devel curl git

# macOS
brew install python@3.11 libffi pkg-config git
```

### Installation

Run the installer script to fetch the latest release:

```bash
# Stable release
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)"

# Beta branch
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ -b beta
```

### Environment Layout

- Virtual environment: `~/.seedpass/app/venv/`
- Entry point: `~/.seedpass/app/src/main.py`

### Verification

```bash
cd ~/.seedpass/app && source venv/bin/activate
cd src && python main.py --version  # Expected: SeedPass v[version]
```

### Running SeedPass

```bash
cd ~/.seedpass/app && source venv/bin/activate
cd src && python main.py
```

## Running Tests

1. Set up a virtual environment and install dependencies:

@@ -39,6 +93,11 @@ This project is written in **Python**. Follow these instructions when working wi

Following these practices helps keep the code base consistent and secure.

## Deterministic Artifact Generation

- All generated artifacts (passwords, keys, TOTP secrets, etc.) must be fully deterministic across runs and platforms.
- Randomness is only permitted for security primitives (e.g., encryption nonces, in-memory keys) and must never influence derived artifacts.
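To make the two rules above concrete, here is a minimal sketch (illustrative only, not SeedPass code; the helper name and context label are assumptions) of deriving an artifact deterministically from a seed while reserving fresh randomness for an encryption nonce:

```python
import hashlib
import hmac
import os

def derive_artifact(seed: bytes, context: str, length: int = 32) -> bytes:
    """Deterministic: the same seed and context always produce the same bytes."""
    return hmac.new(seed, context.encode("utf-8"), hashlib.sha512).digest()[:length]

# Reproducible across runs and platforms because it depends only on its inputs.
password_material = derive_artifact(b"example-parent-seed", "password:example.com:0")

# Randomness stays confined to security primitives such as nonces and never
# feeds back into derived artifacts.
encryption_nonce = os.urandom(12)
```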
## Legacy Index Migration

- Always provide a migration path for index archives and import/export routines.
README.md (94 changed lines)

@@ -16,6 +16,10 @@ This software was not developed by an experienced security expert and should be

Recent releases derive passwords and other artifacts using a fully deterministic algorithm that behaves consistently across Python versions. This improvement means artifacts generated with earlier versions of SeedPass will not match those produced now. Regenerate any previously derived data or retain the old version if you need to reproduce older passwords or keys.

**⚠️ First Run Warning**

Use a dedicated BIP-39 seed phrase exclusively for SeedPass. Offline Mode is **ON by default**, keeping all Nostr syncing disabled until you explicitly opt in. To synchronize with Nostr, disable offline mode through the Settings menu or by running `seedpass config toggle-offline` and choosing to turn syncing on.

---

### Supported OS

@@ -119,22 +123,27 @@ See `docs/ARCHITECTURE.md` and [Nostr Setup](docs/nostr_setup.md) for details.

### Quick Installer

Use the automated installer to download SeedPass and its dependencies in one step.
-The scripts can also install the BeeWare backend for your platform when requested (use `-IncludeGui` on Windows).
-If the GTK `gi` bindings are missing, the installer attempts to install the
-necessary system packages using `apt`, `yum`, `pacman`, or Homebrew.
+The default `tui` mode installs only the text interface, so it runs headlessly and works well in CI or other automation. GUI backends are optional and must be explicitly requested (`--mode gui` or `--mode both` on Linux/macOS, `-IncludeGui` on Windows). If the GTK `gi` bindings are missing, the installer attempts to install the
+necessary system packages using `apt`, `yum`, `pacman`, or Homebrew. When no display server is detected, GUI components are skipped automatically.

**Linux and macOS:**
```bash
-bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)"
+# TUI-only/agent install (headless default)
+bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ --mode tui
```
*Install the beta branch:*
```bash
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ -b beta
```
Make sure the command ends right after `-b beta` with **no trailing parenthesis**.

*Install with GUI support:*

```bash
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ --mode gui
```

**Windows (PowerShell):**
```powershell
# TUI-only/agent install (default)
Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; $scriptContent = (New-Object System.Net.WebClient).DownloadString('https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.ps1'); & ([scriptblock]::create($scriptContent))
```
*Install with the optional GUI:*

@@ -146,6 +155,10 @@ The Windows installer will attempt to install Git automatically if it is not alr

**Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.

#### Installer Dependency Checks

The installer verifies that core build tooling—C/C++ build tools, Rust, CMake, and the imaging/GTK libraries—are available before completing. Use `--mode gui` to install only the graphical interface or `--mode both` to install both interfaces (default: `tui`). On Linux, ensure `xclip` or `wl-clipboard` is installed for clipboard support.

#### Windows Nostr Sync Troubleshooting

When backing up or restoring from Nostr on Windows, a few issues are common:

@@ -225,8 +238,9 @@ After installing `xclip`, restart SeedPass to enable clipboard support.

### Optional GUI

SeedPass ships with a GTK-based desktop interface that is still in development
-and not currently functional. Install the packages for your platform before
-adding the Python GUI dependencies.
+and not currently functional. GUI backends are optional—run the installer with
+`--mode gui` or install the Python extras below to add them. Install the packages
+for your platform before adding the Python GUI dependencies.

- **Debian/Ubuntu**
```bash

@@ -245,14 +259,22 @@ adding the Python GUI dependencies.

brew install pygobject3 gtk+3 adwaita-icon-theme librsvg webkitgtk
```

-With the system requirements in place, install the Python GUI extras:
+With the system requirements in place, install the Python GUI extras for your
+platform:

```bash
-pip install .[gui]
+# Linux
+pip install .[gui-gtk]
+
+# Windows
+pip install .[gui-win]
+
+# macOS
+pip install .[gui-mac]
```

CLI-only users can skip these steps and install just the core package for a
-lightweight setup:
+lightweight, headless setup compatible with CI/automation:

```bash
pip install .

@@ -274,10 +296,10 @@ You can then launch SeedPass and create a backup:

seedpass

# Export your index
-seedpass export --file "~/seedpass_backup.json"
+seedpass vault export --file "~/seedpass_backup.json"

# Later you can restore it
-seedpass import --file "~/seedpass_backup.json"
+seedpass vault import --file "~/seedpass_backup.json"

# Quickly find or retrieve entries
seedpass search "github"

@@ -311,31 +333,30 @@ python -m seedpass_gui

seedpass-gui
```

-GUI dependencies are optional. Install them alongside SeedPass with:
+GUI dependencies are optional. Install them alongside SeedPass with the
+extra for your platform:
-```bash
-pip install "seedpass[gui]"
-# or when working from a local checkout
-pip install -e .[gui]
-```
-After installing the optional GUI extras, add the BeeWare backend for your
-platform:

```bash
# Linux
-pip install toga-gtk
+pip install "seedpass[gui-gtk]"
-# If you see build errors about "cairo" on Linux, install the cairo
-# development headers using your package manager, e.g.:
-sudo apt-get install libcairo2 libcairo2-dev

# Windows
-pip install toga-winforms
+pip install "seedpass[gui-win]"

# macOS
-pip install toga-cocoa
+pip install "seedpass[gui-mac]"
+
+# or when working from a local checkout
+pip install -e ".[gui-gtk]"  # Linux
+pip install -e ".[gui-win]"  # Windows
+pip install -e ".[gui-mac]"  # macOS
```
+
+If you see build errors about "cairo" on Linux, install the cairo development
+headers using your package manager, e.g.:
+
+```bash
+sudo apt-get install libcairo2 libcairo2-dev
```

The GUI works with the same vault and configuration files as the CLI.

@@ -431,6 +452,16 @@ For a full list of commands see [docs/advanced_cli.md](docs/advanced_cli.md). Th

```
*(or `python src/main.py` when running directly from the repository)*

To restore a previously backed up index at launch, provide the backup path
and fingerprint:

```bash
seedpass --restore-backup /path/to/backup.json.enc --fingerprint <fp>
```

Without the flag, the startup prompt offers a **Restore from backup** option
before the vault is initialized.

2. **Follow the Prompts:**

- **Seed Profile Selection:** If you have existing seed profiles, you'll be prompted to select one or add a new one.

@@ -616,6 +647,10 @@ initial setup. You must provide both your 12‑word master seed and the master

password that encrypted the vault; without the correct password the retrieved
data cannot be decrypted.

Alternatively, a local backup file can be loaded at startup. Launch the
application with `--restore-backup <file> --fingerprint <fp>` or choose the
**Restore from backup** option presented before the vault initializes.

1. Start SeedPass and choose option **4** when prompted to set up a seed.
2. Paste your BIP‑85 seed phrase when asked.
3. Enter the master password associated with that seed.

@@ -767,6 +802,7 @@ You can also launch the GUI directly with `seedpass gui` or `seedpass-gui`.

- **No PBKDF2 Salt Needed:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
- **Checksum Verification:** Always verify the script's checksum to ensure its integrity and protect against unauthorized modifications.
- **Potential Bugs and Limitations:** Be aware that the software may contain bugs and lacks certain features. Snapshot chunks are capped at 50 KB and the client rotates snapshots after enough delta events accumulate. The security of memory management and logs has not been thoroughly evaluated and may pose risks of leaking sensitive information.
- **Best-Effort Memory Zeroization:** Sensitive data is wiped from memory when possible, but Python may retain copies of decrypted values.
- **Multiple Seeds Management:** While managing multiple seeds adds flexibility, it also increases the responsibility to secure each seed and its associated password.
- **No PBKDF2 Salt Required:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
- **Default KDF Iterations:** New profiles start with 50,000 PBKDF2 iterations. Adjust this with `seedpass config set kdf_iterations`.
docs/SPEC.md (new file, 44 lines)

@@ -0,0 +1,44 @@

# SeedPass Specification

## Key Hierarchy

SeedPass derives a hierarchy of keys from a single BIP-39 parent seed using HKDF:

- **Master Key** – `HKDF(seed, "seedpass:v1:master")`
- **KEY_STORAGE** – used to encrypt vault data.
- **KEY_INDEX** – protects the metadata index.
- **KEY_PW_DERIVE** – deterministic password generation.
- **KEY_TOTP_DET** – deterministic TOTP secrets.

Each context string keeps derived keys domain separated.
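A minimal sketch of the domain-separation idea (assuming the `cryptography` package and SHA-256, both assumptions; only the `seedpass:v1:master` context string appears in the spec above, the other labels are illustrative):

```python
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

def derive_key(parent_seed: bytes, context: str, length: int = 32) -> bytes:
    """Derive a sub-key; distinct context strings yield unrelated keys."""
    return HKDF(
        algorithm=hashes.SHA256(),
        length=length,
        salt=None,
        info=context.encode("utf-8"),
    ).derive(parent_seed)

master_key = derive_key(b"bip39-parent-seed-bytes", "seedpass:v1:master")
storage_key = derive_key(b"bip39-parent-seed-bytes", "seedpass:v1:storage")  # illustrative context
index_key = derive_key(b"bip39-parent-seed-bytes", "seedpass:v1:index")      # illustrative context
```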
## KDF Parameters

Passwords are protected with **PBKDF2-HMAC-SHA256**. The default work factor is
**50,000 iterations** but may be adjusted via the settings slider. The config
stores a `KdfConfig` structure with the chosen iteration count, algorithm name,
and the current spec version (`CURRENT_KDF_VERSION = 1`). Argon2 is available
with a default `time_cost` of 2 when selected.
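For reference, the standard library can express the default stretch directly. This is a generic PBKDF2-HMAC-SHA256 sketch, not the actual `KdfConfig` handling; SeedPass derives per-password uniqueness from 512-bit BIP-85 child seeds rather than a conventional random salt, so the `salt` argument below is purely illustrative:

```python
import hashlib

DEFAULT_PBKDF2_ITERATIONS = 50_000  # default work factor named above

def stretch_password(password: str, salt: bytes,
                     iterations: int = DEFAULT_PBKDF2_ITERATIONS) -> bytes:
    """Stretch a master password with PBKDF2-HMAC-SHA256 into a 32-byte key."""
    return hashlib.pbkdf2_hmac("sha256", password.encode("utf-8"), salt, iterations, dklen=32)
```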
## Message Formats

SeedPass synchronizes profiles over Nostr using three event kinds:

- **Manifest (`30070`)** – high level snapshot description and current version.
- **Snapshot Chunk (`30071`)** – compressed, encrypted portions of the vault.
- **Delta (`30072`)** – incremental changes since the last snapshot.

Events encode JSON and include tags for checksums, fingerprints, and timestamps.
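For orientation, a sketch of what such a payload might look like; only the kind numbers and the idea of checksum/fingerprint/timestamp tags come from the spec, the concrete field and tag names are assumptions:

```python
import json
import time

KIND_MANIFEST = 30070
KIND_SNAPSHOT_CHUNK = 30071
KIND_DELTA = 30072

# Hypothetical manifest event; real tag names and content fields are not specified here.
manifest_event = {
    "kind": KIND_MANIFEST,
    "created_at": int(time.time()),
    "content": json.dumps({"version": 1, "chunk_count": 3}),
    "tags": [
        ["checksum", "<sha256-of-snapshot>"],
        ["fingerprint", "<profile-fingerprint>"],
    ],
}
```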
## Versioning

Configuration and KDF schemas are versioned so clients can migrate older
profiles. Nostr events carry a version field in the manifest, and the software
follows semantic versioning for releases.
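A minimal sketch of how a versioned KDF config can gate migration; only the iteration count, algorithm name, and `CURRENT_KDF_VERSION = 1` come from the spec text, the structure and migration step are illustrative:

```python
from dataclasses import dataclass

CURRENT_KDF_VERSION = 1

@dataclass
class KdfConfig:  # illustrative shape, not the real class definition
    algorithm: str = "pbkdf2-sha256"
    iterations: int = 50_000
    version: int = CURRENT_KDF_VERSION

def migrate_kdf_config(cfg: KdfConfig) -> KdfConfig:
    """Upgrade an older profile's KDF settings to the current spec version."""
    if cfg.version < CURRENT_KDF_VERSION:
        # Illustrative: apply per-version upgrades here, then stamp the new version.
        cfg.version = CURRENT_KDF_VERSION
    return cfg
```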
## Memory Protection

SeedPass encrypts sensitive values in memory and attempts to wipe them when no
longer needed. This zeroization is best-effort only; Python's memory management
may retain copies of decrypted data. Critical cryptographic operations may move
to a Rust/WASM module in the future to provide stronger guarantees.
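To illustrate why zeroization is best-effort in Python: immutable `bytes` objects cannot be wiped in place, while a mutable `bytearray` can at least be overwritten (a sketch of the general technique, not the SeedPass implementation):

```python
import secrets

def wipe(buffer: bytearray) -> None:
    """Overwrite a mutable buffer in place; copies Python made elsewhere may survive."""
    for i in range(len(buffer)):
        buffer[i] = 0

secret = bytearray(secrets.token_bytes(32))
try:
    pass  # ... use the secret ...
finally:
    wipe(secret)
```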
|
@@ -78,7 +78,7 @@ Manage the entire vault for a profile.
|
|||||||
|
|
||||||
### Nostr Commands
|
### Nostr Commands
|
||||||
|
|
||||||
Interact with the Nostr network for backup and synchronization.
|
Interact with the Nostr network for backup and synchronization. Offline mode is enabled by default, so disable it with `seedpass config toggle-offline` before using these commands.
|
||||||
|
|
||||||
| Action | Command | Examples |
|
| Action | Command | Examples |
|
||||||
| :--- | :--- | :--- |
|
| :--- | :--- | :--- |
|
||||||
@@ -127,7 +127,7 @@ Run or stop the local HTTP API.
|
|||||||
| Action | Command | Examples |
|
| Action | Command | Examples |
|
||||||
| :--- | :--- | :--- |
|
| :--- | :--- | :--- |
|
||||||
| Start the API | `api start` | `seedpass api start --host 0.0.0.0 --port 8000` |
|
| Start the API | `api start` | `seedpass api start --host 0.0.0.0 --port 8000` |
|
||||||
| Stop the API | `api stop` | `seedpass api stop` |
|
| Stop the API | `api stop --token TOKEN` | `seedpass api stop --token <token>` |
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -214,7 +214,7 @@ Set the `SEEDPASS_CORS_ORIGINS` environment variable to a comma‑separated list
|
|||||||
SEEDPASS_CORS_ORIGINS=http://localhost:3000 seedpass api start
|
SEEDPASS_CORS_ORIGINS=http://localhost:3000 seedpass api start
|
||||||
```
|
```
|
||||||
|
|
||||||
Shut down the server with `seedpass api stop`.
|
Shut down the server with `seedpass api stop --token <token>`.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@@ -83,7 +83,7 @@ maintainable while enabling a consistent experience on multiple platforms.
|
|||||||
- **Change Master Password:** Rotate your encryption password at any time.
|
- **Change Master Password:** Rotate your encryption password at any time.
|
||||||
- **Checksum Verification Utilities:** Verify or regenerate the script checksum.
|
- **Checksum Verification Utilities:** Verify or regenerate the script checksum.
|
||||||
- **Relay Management:** List, add, remove or reset configured Nostr relays.
|
- **Relay Management:** List, add, remove or reset configured Nostr relays.
|
||||||
- **Offline Mode:** Disable network sync to work entirely locally.
|
- **Offline Mode (default):** SeedPass runs without network sync until you explicitly enable it.
|
||||||
|
|
||||||
## Prerequisites
|
## Prerequisites
|
||||||
|
|
||||||
@@ -120,6 +120,11 @@ isn't on your PATH. If these tools are unavailable you'll see a link to download
|
|||||||
the installer now attempts to download Python 3.12 automatically so you don't have to compile packages from source.
|
the installer now attempts to download Python 3.12 automatically so you don't have to compile packages from source.
|
||||||
|
|
||||||
**Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.
|
**Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.
|
||||||
|
|
||||||
|
#### Installer Dependency Checks
|
||||||
|
|
||||||
|
The installer verifies that core build tooling—C/C++ build tools, Rust, CMake, and the imaging/GTK libraries—are available before completing. Pass `--no-gui` to skip installing GUI packages. On Linux, ensure `xclip` or `wl-clipboard` is installed for clipboard support.
|
||||||
|
|
||||||
### Uninstall
|
### Uninstall
|
||||||
|
|
||||||
Run the matching uninstaller if you need to remove a previous installation or clean up an old `seedpass` command:
|
Run the matching uninstaller if you need to remove a previous installation or clean up an old `seedpass` command:
|
||||||
@@ -467,7 +472,7 @@ Back in the Settings menu you can:
|
|||||||
whether both the encrypted database and the script itself pass checksum
|
whether both the encrypted database and the script itself pass checksum
|
||||||
validation.
|
validation.
|
||||||
* Choose `14` to toggle Secret Mode and set the clipboard clear delay.
|
* Choose `14` to toggle Secret Mode and set the clipboard clear delay.
|
||||||
* Select `15` to toggle Offline Mode and work locally without contacting Nostr.
|
* Select `15` to toggle Offline Mode. SeedPass starts offline; disable it here to enable Nostr syncing.
|
||||||
* Choose `16` to toggle Quick Unlock so subsequent actions skip the password prompt. Startup delay is unchanged.
|
* Choose `16` to toggle Quick Unlock so subsequent actions skip the password prompt. Startup delay is unchanged.
|
||||||
* Select `17` to return to the main menu.
|
* Select `17` to return to the main menu.
|
||||||
|
|
||||||
@@ -561,7 +566,7 @@ Mutation testing is disabled in the GitHub workflow due to reliability issues an
|
|||||||
- **Multiple Seeds Management:** While managing multiple seeds adds flexibility, it also increases the responsibility to secure each seed and its associated password.
|
- **Multiple Seeds Management:** While managing multiple seeds adds flexibility, it also increases the responsibility to secure each seed and its associated password.
|
||||||
- **No PBKDF2 Salt Required:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
|
- **No PBKDF2 Salt Required:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
|
||||||
- **Default KDF Iterations:** New profiles start with 50,000 PBKDF2 iterations. Use `seedpass config set kdf_iterations` to change this.
|
- **Default KDF Iterations:** New profiles start with 50,000 PBKDF2 iterations. Use `seedpass config set kdf_iterations` to change this.
|
||||||
- **Offline Mode:** Disable Nostr sync to keep all operations local until you re-enable networking.
|
- **Offline Mode (default):** Nostr sync is disabled until you explicitly enable it via the Settings menu or `seedpass config toggle-offline`.
|
||||||
- **Quick Unlock:** Store a hashed copy of your password so future actions skip the prompt. Startup delay no longer changes. Use with caution on shared systems.
|
- **Quick Unlock:** Store a hashed copy of your password so future actions skip the prompt. Startup delay no longer changes. Use with caution on shared systems.
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
@@ -1,6 +1,6 @@
|
|||||||
# Nostr Setup
|
# Nostr Setup
|
||||||
|
|
||||||
This guide explains how SeedPass uses the Nostr protocol for encrypted vault backups and how to configure relays.
|
This guide explains how SeedPass uses the Nostr protocol for encrypted vault backups and how to configure relays. SeedPass starts in offline mode, so you must explicitly disable it before any network synchronization. Run `seedpass config toggle-offline` or use the Settings menu to enable online syncing.
|
||||||
|
|
||||||
## Relay Configuration
|
## Relay Configuration
|
||||||
|
|
||||||
|
238
poetry.lock
generated
238
poetry.lock
generated
@@ -724,6 +724,22 @@ files = [
|
|||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "clr-loader"
|
||||||
|
version = "0.2.7.post0"
|
||||||
|
description = "Generic pure Python loader for .NET runtimes"
|
||||||
|
optional = true
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
groups = ["main"]
|
||||||
|
markers = "extra == \"gui-win\""
|
||||||
|
files = [
|
||||||
|
{file = "clr_loader-0.2.7.post0-py3-none-any.whl", hash = "sha256:e0b9fcc107d48347a4311a28ffe3ae78c4968edb216ffb6564cb03f7ace0bb47"},
|
||||||
|
{file = "clr_loader-0.2.7.post0.tar.gz", hash = "sha256:b7a8b3f8fbb1bcbbb6382d887e21d1742d4f10b5ea209e4ad95568fe97e1c7c6"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
cffi = {version = ">=1.17", markers = "python_version >= \"3.8\""}
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "coincurve"
|
name = "coincurve"
|
||||||
version = "21.0.0"
|
version = "21.0.0"
|
||||||
@@ -1125,6 +1141,88 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)
|
|||||||
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"]
|
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"]
|
||||||
typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""]
|
typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fonttools"
|
||||||
|
version = "4.59.1"
|
||||||
|
description = "Tools to manipulate font files"
|
||||||
|
optional = true
|
||||||
|
python-versions = ">=3.9"
|
||||||
|
groups = ["main"]
|
||||||
|
markers = "extra == \"gui-mac\""
|
||||||
|
files = [
|
||||||
|
{file = "fonttools-4.59.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e90a89e52deb56b928e761bb5b5f65f13f669bfd96ed5962975debea09776a23"},
|
||||||
|
{file = "fonttools-4.59.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d29ab70658d2ec19422b25e6ace00a0b0ae4181ee31e03335eaef53907d2d83"},
|
||||||
|
{file = "fonttools-4.59.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94f9721a564978a10d5c12927f99170d18e9a32e5a727c61eae56f956a4d118b"},
|
||||||
|
{file = "fonttools-4.59.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8c8758a7d97848fc8b514b3d9b4cb95243714b2f838dde5e1e3c007375de6214"},
|
||||||
|
{file = "fonttools-4.59.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2aeb829ad9d41a2ef17cab8bb5d186049ba38a840f10352e654aa9062ec32dc1"},
|
||||||
|
{file = "fonttools-4.59.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac216a2980a2d2b3b88c68a24f8a9bfb203e2490e991b3238502ad8f1e7bfed0"},
|
||||||
|
{file = "fonttools-4.59.1-cp310-cp310-win32.whl", hash = "sha256:d31dc137ed8ec71dbc446949eba9035926e6e967b90378805dcf667ff57cabb1"},
|
||||||
|
{file = "fonttools-4.59.1-cp310-cp310-win_amd64.whl", hash = "sha256:5265bc52ed447187d39891b5f21d7217722735d0de9fe81326566570d12851a9"},
|
||||||
|
{file = "fonttools-4.59.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4909cce2e35706f3d18c54d3dcce0414ba5e0fb436a454dffec459c61653b513"},
|
||||||
|
{file = "fonttools-4.59.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbec204fa9f877641747f2d9612b2b656071390d7a7ef07a9dbf0ecf9c7195c"},
|
||||||
|
{file = "fonttools-4.59.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39dfd42cc2dc647b2c5469bc7a5b234d9a49e72565b96dd14ae6f11c2c59ef15"},
|
||||||
|
{file = "fonttools-4.59.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b11bc177a0d428b37890825d7d025040d591aa833f85f8d8878ed183354f47df"},
|
||||||
|
{file = "fonttools-4.59.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b9b4c35b3be45e5bc774d3fc9608bbf4f9a8d371103b858c80edbeed31dd5aa"},
|
||||||
|
{file = "fonttools-4.59.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:01158376b8a418a0bae9625c476cebfcfcb5e6761e9d243b219cd58341e7afbb"},
|
||||||
|
{file = "fonttools-4.59.1-cp311-cp311-win32.whl", hash = "sha256:cf7c5089d37787387123f1cb8f1793a47c5e1e3d1e4e7bfbc1cc96e0f925eabe"},
|
||||||
|
{file = "fonttools-4.59.1-cp311-cp311-win_amd64.whl", hash = "sha256:c866eef7a0ba320486ade6c32bfc12813d1a5db8567e6904fb56d3d40acc5116"},
|
||||||
|
{file = "fonttools-4.59.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:43ab814bbba5f02a93a152ee61a04182bb5809bd2bc3609f7822e12c53ae2c91"},
|
||||||
|
{file = "fonttools-4.59.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4f04c3ffbfa0baafcbc550657cf83657034eb63304d27b05cff1653b448ccff6"},
|
||||||
|
{file = "fonttools-4.59.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d601b153e51a5a6221f0d4ec077b6bfc6ac35bfe6c19aeaa233d8990b2b71726"},
|
||||||
|
{file = "fonttools-4.59.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c735e385e30278c54f43a0d056736942023c9043f84ee1021eff9fd616d17693"},
|
||||||
|
{file = "fonttools-4.59.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1017413cdc8555dce7ee23720da490282ab7ec1cf022af90a241f33f9a49afc4"},
|
||||||
|
{file = "fonttools-4.59.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5c6d8d773470a5107052874341ed3c487c16ecd179976d81afed89dea5cd7406"},
|
||||||
|
{file = "fonttools-4.59.1-cp312-cp312-win32.whl", hash = "sha256:2a2d0d33307f6ad3a2086a95dd607c202ea8852fa9fb52af9b48811154d1428a"},
|
||||||
|
{file = "fonttools-4.59.1-cp312-cp312-win_amd64.whl", hash = "sha256:0b9e4fa7eaf046ed6ac470f6033d52c052481ff7a6e0a92373d14f556f298dc0"},
|
||||||
|
{file = "fonttools-4.59.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:89d9957b54246c6251345297dddf77a84d2c19df96af30d2de24093bbdf0528b"},
|
||||||
|
{file = "fonttools-4.59.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8156b11c0d5405810d216f53907bd0f8b982aa5f1e7e3127ab3be1a4062154ff"},
|
||||||
|
{file = "fonttools-4.59.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8387876a8011caec52d327d5e5bca705d9399ec4b17afb8b431ec50d47c17d23"},
|
||||||
|
{file = "fonttools-4.59.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb13823a74b3a9204a8ed76d3d6d5ec12e64cc5bc44914eb9ff1cdac04facd43"},
|
||||||
|
{file = "fonttools-4.59.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e1ca10da138c300f768bb68e40e5b20b6ecfbd95f91aac4cc15010b6b9d65455"},
|
||||||
|
{file = "fonttools-4.59.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2beb5bfc4887a3130f8625349605a3a45fe345655ce6031d1bac11017454b943"},
|
||||||
|
{file = "fonttools-4.59.1-cp313-cp313-win32.whl", hash = "sha256:419f16d750d78e6d704bfe97b48bba2f73b15c9418f817d0cb8a9ca87a5b94bf"},
|
||||||
|
{file = "fonttools-4.59.1-cp313-cp313-win_amd64.whl", hash = "sha256:c536f8a852e8d3fa71dde1ec03892aee50be59f7154b533f0bf3c1174cfd5126"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:d5c3bfdc9663f3d4b565f9cb3b8c1efb3e178186435b45105bde7328cfddd7fe"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ea03f1da0d722fe3c2278a05957e6550175571a4894fbf9d178ceef4a3783d2b"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:57a3708ca6bfccb790f585fa6d8f29432ec329618a09ff94c16bcb3c55994643"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:729367c91eb1ee84e61a733acc485065a00590618ca31c438e7dd4d600c01486"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f8ef66ac6db450193ed150e10b3b45dde7aded10c5d279968bc63368027f62b"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:075f745d539a998cd92cb84c339a82e53e49114ec62aaea8307c80d3ad3aef3a"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314-win32.whl", hash = "sha256:c2b0597522d4c5bb18aa5cf258746a2d4a90f25878cbe865e4d35526abd1b9fc"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314-win_amd64.whl", hash = "sha256:e9ad4ce044e3236f0814c906ccce8647046cc557539661e35211faadf76f283b"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:652159e8214eb4856e8387ebcd6b6bd336ee258cbeb639c8be52005b122b9609"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:43d177cd0e847ea026fedd9f099dc917da136ed8792d142298a252836390c478"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e54437651e1440ee53a95e6ceb6ee440b67a3d348c76f45f4f48de1a5ecab019"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6065fdec8ff44c32a483fd44abe5bcdb40dd5e2571a5034b555348f2b3a52cea"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42052b56d176f8b315fbc09259439c013c0cb2109df72447148aeda677599612"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:bcd52eaa5c4c593ae9f447c1d13e7e4a00ca21d755645efa660b6999425b3c88"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314t-win32.whl", hash = "sha256:02e4fdf27c550dded10fe038a5981c29f81cb9bc649ff2eaa48e80dab8998f97"},
|
||||||
|
{file = "fonttools-4.59.1-cp314-cp314t-win_amd64.whl", hash = "sha256:412a5fd6345872a7c249dac5bcce380393f40c1c316ac07f447bc17d51900922"},
|
||||||
|
{file = "fonttools-4.59.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ab4c1fb45f2984b8b4a3face7cff0f67f9766e9414cbb6fd061e9d77819de98"},
|
||||||
|
{file = "fonttools-4.59.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ee39da0227950f88626c91e219659e6cd725ede826b1c13edd85fc4cec9bbe6"},
|
||||||
|
{file = "fonttools-4.59.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:58a8844f96cff35860647a65345bfca87f47a2494bfb4bef754e58c082511443"},
|
||||||
|
{file = "fonttools-4.59.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f3f021cea6e36410874763f4a517a5e2d6ac36ca8f95521f3a9fdaad0fe73dc"},
|
||||||
|
{file = "fonttools-4.59.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf5fb864f80061a40c1747e0dbc4f6e738de58dd6675b07eb80bd06a93b063c4"},
|
||||||
|
{file = "fonttools-4.59.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c29ea087843e27a7cffc78406d32a5abf166d92afde7890394e9e079c9b4dbe9"},
|
||||||
|
{file = "fonttools-4.59.1-cp39-cp39-win32.whl", hash = "sha256:a960b09ff50c2e87864e83f352e5a90bcf1ad5233df579b1124660e1643de272"},
|
||||||
|
{file = "fonttools-4.59.1-cp39-cp39-win_amd64.whl", hash = "sha256:e3680884189e2b7c3549f6d304376e64711fd15118e4b1ae81940cb6b1eaa267"},
|
||||||
|
{file = "fonttools-4.59.1-py3-none-any.whl", hash = "sha256:647db657073672a8330608970a984d51573557f328030566521bc03415535042"},
|
||||||
|
{file = "fonttools-4.59.1.tar.gz", hash = "sha256:74995b402ad09822a4c8002438e54940d9f1ecda898d2bb057729d7da983e4cb"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"]
|
||||||
|
graphite = ["lz4 (>=1.7.4.2)"]
|
||||||
|
interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""]
|
||||||
|
lxml = ["lxml (>=4.0)"]
|
||||||
|
pathops = ["skia-pathops (>=0.5.0)"]
|
||||||
|
plot = ["matplotlib"]
|
||||||
|
repacker = ["uharfbuzz (>=0.23.0)"]
|
||||||
|
symfont = ["sympy"]
|
||||||
|
type1 = ["xattr ; sys_platform == \"darwin\""]
|
||||||
|
unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""]
|
||||||
|
woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "freezegun"
|
name = "freezegun"
|
||||||
version = "1.5.4"
|
version = "1.5.4"
|
||||||
@@ -2332,6 +2430,35 @@ files = [
|
|||||||
{file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
|
{file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pycairo"
|
||||||
|
version = "1.28.0"
|
||||||
|
description = "Python interface for cairo"
|
||||||
|
optional = true
|
||||||
|
python-versions = ">=3.9"
|
||||||
|
groups = ["main"]
|
||||||
|
markers = "extra == \"gui-gtk\""
|
||||||
|
files = [
|
||||||
|
{file = "pycairo-1.28.0-cp310-cp310-win32.whl", hash = "sha256:53e6dbc98456f789965dad49ef89ce2c62f9a10fc96c8d084e14da0ffb73d8a6"},
|
||||||
|
{file = "pycairo-1.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:c8ab91a75025f984bc327ada335c787efb61c929ea0512063793cb36cee503d4"},
|
||||||
|
{file = "pycairo-1.28.0-cp310-cp310-win_arm64.whl", hash = "sha256:e955328c1a5147bf71ee94e206413ce15e12630296a79788fcd246c80e5337b8"},
|
||||||
|
{file = "pycairo-1.28.0-cp311-cp311-win32.whl", hash = "sha256:0fee15f5d72b13ba5fd065860312493dc1bca6ff2dce200ee9d704e11c94e60a"},
|
||||||
|
{file = "pycairo-1.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:6339979bfec8b58a06476094a9a5c104bd5a99932ddaff16ca0d9203d2f4482c"},
|
||||||
|
{file = "pycairo-1.28.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6ae15392e28ebfc0b35d8dc05d395d3b6be4bad9ad4caecf0fa12c8e7150225"},
|
||||||
|
{file = "pycairo-1.28.0-cp312-cp312-win32.whl", hash = "sha256:c00cfbb7f30eb7ca1d48886712932e2d91e8835a8496f4e423878296ceba573e"},
|
||||||
|
{file = "pycairo-1.28.0-cp312-cp312-win_amd64.whl", hash = "sha256:d50d190f5033992b55050b9f337ee42a45c3568445d5e5d7987bab96c278d8a6"},
|
||||||
|
{file = "pycairo-1.28.0-cp312-cp312-win_arm64.whl", hash = "sha256:957e0340ee1c279d197d4f7cfa96f6d8b48e453eec711fca999748d752468ff4"},
|
||||||
|
{file = "pycairo-1.28.0-cp313-cp313-win32.whl", hash = "sha256:d13352429d8a08a1cb3607767d23d2fb32e4c4f9faa642155383980ec1478c24"},
|
||||||
|
{file = "pycairo-1.28.0-cp313-cp313-win_amd64.whl", hash = "sha256:082aef6b3a9dcc328fa648d38ed6b0a31c863e903ead57dd184b2e5f86790140"},
|
||||||
|
{file = "pycairo-1.28.0-cp313-cp313-win_arm64.whl", hash = "sha256:026afd53b75291917a7412d9fe46dcfbaa0c028febd46ff1132d44a53ac2c8b6"},
|
||||||
|
{file = "pycairo-1.28.0-cp314-cp314-win32.whl", hash = "sha256:d0ab30585f536101ad6f09052fc3895e2a437ba57531ea07223d0e076248025d"},
|
||||||
|
{file = "pycairo-1.28.0-cp314-cp314-win_amd64.whl", hash = "sha256:94f2ed204999ab95a0671a0fa948ffbb9f3d6fb8731fe787917f6d022d9c1c0f"},
|
||||||
|
{file = "pycairo-1.28.0-cp39-cp39-win32.whl", hash = "sha256:3ed16d48b8a79cc584cb1cb0ad62dfb265f2dda6d6a19ef5aab181693e19c83c"},
|
||||||
|
{file = "pycairo-1.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:da0d1e6d4842eed4d52779222c6e43d254244a486ca9fdab14e30042fd5bdf28"},
|
||||||
|
{file = "pycairo-1.28.0-cp39-cp39-win_arm64.whl", hash = "sha256:458877513eb2125513122e8aa9c938630e94bb0574f94f4fb5ab55eb23d6e9ac"},
|
||||||
|
{file = "pycairo-1.28.0.tar.gz", hash = "sha256:26ec5c6126781eb167089a123919f87baa2740da2cca9098be8b3a6b91cc5fbc"},
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pycparser"
|
name = "pycparser"
|
||||||
version = "2.22"
|
version = "2.22"
|
||||||
@@ -2544,6 +2671,21 @@ files = [
|
|||||||
[package.extras]
|
[package.extras]
|
||||||
windows-terminal = ["colorama (>=0.4.6)"]
|
windows-terminal = ["colorama (>=0.4.6)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pygobject"
|
||||||
|
version = "3.52.3"
|
||||||
|
description = "Python bindings for GObject Introspection"
|
||||||
|
optional = true
|
||||||
|
python-versions = "<4.0,>=3.9"
|
||||||
|
groups = ["main"]
|
||||||
|
markers = "extra == \"gui-gtk\""
|
||||||
|
files = [
|
||||||
|
{file = "pygobject-3.52.3.tar.gz", hash = "sha256:00e427d291e957462a8fad659a9f9c8be776ff82a8b76bdf402f1eaeec086d82"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
pycairo = ">=1.16"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyjwt"
|
name = "pyjwt"
|
||||||
version = "2.10.1"
|
version = "2.10.1"
|
||||||
@@ -2702,6 +2844,22 @@ files = [
|
|||||||
{file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"},
|
{file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pythonnet"
|
||||||
|
version = "3.0.5"
|
||||||
|
description = ".NET and Mono integration for Python"
|
||||||
|
optional = true
|
||||||
|
python-versions = "<3.14,>=3.7"
|
||||||
|
groups = ["main"]
|
||||||
|
markers = "extra == \"gui-win\""
|
||||||
|
files = [
|
||||||
|
{file = "pythonnet-3.0.5-py3-none-any.whl", hash = "sha256:f6702d694d5d5b163c9f3f5cc34e0bed8d6857150237fae411fefb883a656d20"},
|
||||||
|
{file = "pythonnet-3.0.5.tar.gz", hash = "sha256:48e43ca463941b3608b32b4e236db92d8d40db4c58a75ace902985f76dac21cf"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
clr_loader = ">=0.2.7,<0.3.0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pywin32"
|
name = "pywin32"
|
||||||
version = "311"
|
version = "311"
|
||||||
@@ -2794,6 +2952,23 @@ pygments = ">=2.13.0,<3.0.0"
|
|||||||
[package.extras]
|
[package.extras]
|
||||||
jupyter = ["ipywidgets (>=7.5.1,<9)"]
|
jupyter = ["ipywidgets (>=7.5.1,<9)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rubicon-objc"
|
||||||
|
version = "0.5.2"
|
||||||
|
description = "A bridge between an Objective C runtime environment and Python."
|
||||||
|
optional = true
|
||||||
|
python-versions = ">=3.9"
|
||||||
|
groups = ["main"]
|
||||||
|
markers = "extra == \"gui-mac\""
|
||||||
|
files = [
|
||||||
|
{file = "rubicon_objc-0.5.2-py3-none-any.whl", hash = "sha256:829b253c579e51fc34f4bb6587c34806e78960dcc1eb24e62b38141a1fe02b39"},
|
||||||
|
{file = "rubicon_objc-0.5.2.tar.gz", hash = "sha256:1180593935f6a8a39c23b5f4b7baa24aedf9f7285e80804a1d9d6b50a50572f5"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
dev = ["pre-commit (==4.2.0)", "pytest (==8.4.1)", "setuptools_scm (==8.3.1)", "tox (==4.28.4)"]
|
||||||
|
docs = ["furo (==2025.7.19)", "pyenchant (==3.2.2)", "sphinx (==8.2.3)", "sphinx-autobuild (==2024.10.3)", "sphinx-copybutton (==0.5.2)", "sphinx_tabs (==3.4.7)", "sphinxcontrib-spelling (==8.0.1)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "shellingham"
|
name = "shellingham"
|
||||||
version = "1.5.4"
|
version = "1.5.4"
|
||||||
@@ -2894,6 +3069,24 @@ files = [
|
|||||||
[package.extras]
|
[package.extras]
|
||||||
tests = ["pytest", "pytest-cov"]
|
tests = ["pytest", "pytest-cov"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "toga-cocoa"
|
||||||
|
version = "0.5.2"
|
||||||
|
description = "A Cocoa (macOS) backend for the Toga widget toolkit."
|
||||||
|
optional = true
|
||||||
|
python-versions = ">=3.9"
|
||||||
|
groups = ["main"]
|
||||||
|
markers = "extra == \"gui-mac\""
|
||||||
|
files = [
|
||||||
|
{file = "toga_cocoa-0.5.2-py3-none-any.whl", hash = "sha256:a4d5d1546bf92372a6fb1b450164735fb107b2ee69d15bf87421fec3c78465f9"},
|
||||||
|
{file = "toga_cocoa-0.5.2.tar.gz", hash = "sha256:dd8e1e29eff53c2e4cbe3ded9ea037716062b65f82b9cac478a82e15ba0a2750"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
fonttools = ">=4.42.1,<5.0.0"
|
||||||
|
rubicon-objc = ">=0.5.1,<0.6.0"
|
||||||
|
toga-core = "0.5.2"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "toga-core"
|
name = "toga-core"
|
||||||
version = "0.5.2"
|
version = "0.5.2"
|
||||||
@@ -2905,7 +3098,7 @@ files = [
|
|||||||
{file = "toga_core-0.5.2-py3-none-any.whl", hash = "sha256:e872cebd2d899e9138f73393e8cd834a55a057aa269608ff7314a853ab33cb4e"},
|
{file = "toga_core-0.5.2-py3-none-any.whl", hash = "sha256:e872cebd2d899e9138f73393e8cd834a55a057aa269608ff7314a853ab33cb4e"},
|
||||||
{file = "toga_core-0.5.2.tar.gz", hash = "sha256:bdd3760146b74c8d315cb901392c2b645ab3e5d4cd90114f3e36e0e7dad3d6d1"},
|
{file = "toga_core-0.5.2.tar.gz", hash = "sha256:bdd3760146b74c8d315cb901392c2b645ab3e5d4cd90114f3e36e0e7dad3d6d1"},
|
||||||
]
|
]
|
||||||
markers = {main = "extra == \"gui\""}
|
markers = {main = "extra == \"gui\" or extra == \"gui-gtk\" or extra == \"gui-win\" or extra == \"gui-mac\""}
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
travertino = "0.5.2"
|
travertino = "0.5.2"
|
||||||
@@ -2929,6 +3122,42 @@ files = [
|
|||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
toga-core = "0.5.2"
|
toga-core = "0.5.2"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "toga-gtk"
|
||||||
|
version = "0.5.2"
|
||||||
|
description = "A GTK backend for the Toga widget toolkit."
|
||||||
|
optional = true
|
||||||
|
python-versions = ">=3.9"
|
||||||
|
groups = ["main"]
|
||||||
|
markers = "extra == \"gui-gtk\""
|
||||||
|
files = [
|
||||||
|
{file = "toga_gtk-0.5.2-py3-none-any.whl", hash = "sha256:15b346ac1a2584de5effe5e73a3888f055c68c93300aeb111db9d64186b31646"},
|
||||||
|
{file = "toga_gtk-0.5.2.tar.gz", hash = "sha256:9212db774dd5f47820d2242bb09c9f44e12e87e3db49ccb967016c6bb311139b"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
pycairo = ">=1.17.0"
|
||||||
|
pygobject = ">=3.50.0"
|
||||||
|
toga-core = "0.5.2"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "toga-winforms"
|
||||||
|
version = "0.5.2"
|
||||||
|
description = "A Windows backend for the Toga widget toolkit using the WinForms API."
|
||||||
|
optional = true
|
||||||
|
python-versions = ">=3.9"
|
||||||
|
groups = ["main"]
|
||||||
|
markers = "extra == \"gui-win\""
|
||||||
|
files = [
|
||||||
|
{file = "toga_winforms-0.5.2-py3-none-any.whl", hash = "sha256:83181309f204bcc4a34709d23fdfd68467ae8ecc39c906d13c661cb9a0ef581b"},
|
||||||
|
{file = "toga_winforms-0.5.2.tar.gz", hash = "sha256:7e65ee9a31db6588c41c01cc49c12df1b2019581410be6ba7c685e7ac75f7c4a"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
pillow = ">=10.0.0"
|
||||||
|
pythonnet = ">=3.0.0"
|
||||||
|
toga-core = "0.5.2"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "toml"
|
name = "toml"
|
||||||
version = "0.10.2"
|
version = "0.10.2"
|
||||||
@@ -2995,7 +3224,7 @@ files = [
|
|||||||
{file = "travertino-0.5.2-py3-none-any.whl", hash = "sha256:fd69ac3b14f2847e4c972198588b8a86ca3b437aaa0c8ce7259bbe5dab17aff1"},
|
{file = "travertino-0.5.2-py3-none-any.whl", hash = "sha256:fd69ac3b14f2847e4c972198588b8a86ca3b437aaa0c8ce7259bbe5dab17aff1"},
|
||||||
{file = "travertino-0.5.2.tar.gz", hash = "sha256:5afcc673e14e16c3c04c0e3fe387062633e6bc88e87bc0bbd214a04b4dfbbcd4"},
|
{file = "travertino-0.5.2.tar.gz", hash = "sha256:5afcc673e14e16c3c04c0e3fe387062633e6bc88e87bc0bbd214a04b4dfbbcd4"},
|
||||||
]
|
]
|
||||||
markers = {main = "extra == \"gui\""}
|
markers = {main = "extra == \"gui\" or extra == \"gui-gtk\" or extra == \"gui-win\" or extra == \"gui-mac\""}
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
dev = ["coverage-conditional-plugin (==0.9.0)", "coverage[toml] (==7.9.2)", "pytest (==8.4.1)", "tox (==4.27.0)", "typing-extensions (==4.12.2) ; python_version < \"3.10\""]
|
dev = ["coverage-conditional-plugin (==0.9.0)", "coverage[toml] (==7.9.2)", "pytest (==8.4.1)", "tox (==4.27.0)", "typing-extensions (==4.12.2) ; python_version < \"3.10\""]
|
||||||
@@ -3390,8 +3619,11 @@ propcache = ">=0.2.1"
|
|||||||
|
|
||||||
[extras]
|
[extras]
|
||||||
gui = ["pillow", "toga-core"]
|
gui = ["pillow", "toga-core"]
|
||||||
|
gui-gtk = ["toga-gtk"]
|
||||||
|
gui-mac = ["toga-cocoa"]
|
||||||
|
gui-win = ["toga-winforms"]
|
||||||
|
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.1"
|
lock-version = "2.1"
|
||||||
python-versions = ">=3.10,<3.13"
|
python-versions = ">=3.10,<3.13"
|
||||||
content-hash = "8d9d5db692f39b9b05e0a365d779505583074f510d34de17627ac1849ca61bde"
|
content-hash = "9cdc15f624271aab6d58e5f945c0e99878079da7c3f5a397b0753166c06f9612"
|
||||||
|
@@ -36,9 +36,15 @@ PyJWT = ">=2.8.0"
|
|||||||
slowapi = "^0.1.9"
|
slowapi = "^0.1.9"
|
||||||
toga-core = { version = ">=0.5.2", optional = true }
|
toga-core = { version = ">=0.5.2", optional = true }
|
||||||
pillow = { version = "*", optional = true }
|
pillow = { version = "*", optional = true }
|
||||||
|
toga-gtk = { version = ">=0.5.2", optional = true }
|
||||||
|
toga-winforms = { version = ">=0.5.2", optional = true }
|
||||||
|
toga-cocoa = { version = ">=0.5.2", optional = true }
|
||||||
|
|
||||||
[tool.poetry.extras]
|
[tool.poetry.extras]
|
||||||
gui = ["toga-core", "pillow"]
|
gui = ["toga-core", "pillow"]
|
||||||
|
gui-gtk = ["toga-gtk"]
|
||||||
|
gui-win = ["toga-winforms"]
|
||||||
|
gui-mac = ["toga-cocoa"]
|
||||||
|
|
||||||
[tool.poetry.group.dev.dependencies]
|
[tool.poetry.group.dev.dependencies]
|
||||||
pytest = "^8.2"
|
pytest = "^8.2"
|
||||||
|
@@ -43,6 +43,7 @@ from seedpass.core.vault import Vault
from seedpass.core.config_manager import ConfigManager
from seedpass.core.backup import BackupManager
from seedpass.core.entry_management import EntryManager
+from seedpass.core.state_manager import StateManager
from nostr.client import NostrClient
from utils.fingerprint import generate_fingerprint
from utils.fingerprint_manager import FingerprintManager
@@ -195,11 +196,13 @@ def main() -> None:

    encrypted = entry_mgr.vault.get_encrypted_index()
    if encrypted:
+        idx = StateManager(dir_path).state.get("nostr_account_idx", 0)
        client = NostrClient(
            entry_mgr.vault.encryption_manager,
            fingerprint or dir_path.name,
            parent_seed=seed,
            config_manager=cfg_mgr,
+            account_index=idx,
        )
        asyncio.run(client.publish_snapshot(encrypted))
        print("[+] Data synchronized to Nostr.")
@@ -17,6 +17,7 @@ VENV_DIR="$INSTALL_DIR/venv"
LAUNCHER_DIR="$HOME/.local/bin"
LAUNCHER_PATH="$LAUNCHER_DIR/seedpass"
BRANCH="main" # Default branch
+MODE="tui"
INSTALL_GUI=false

# --- Helper Functions ---
@@ -59,9 +60,9 @@ install_dependencies() {
    fi
}
usage() {
-    echo "Usage: $0 [-b | --branch <branch_name>] [--with-gui] [-h | --help]"
+    echo "Usage: $0 [-b | --branch <branch_name>] [-m | --mode <tui|gui|both>] [-h | --help]"
    echo "  -b, --branch   Specify the git branch to install (default: main)"
-    echo "  --with-gui     Include graphical interface dependencies"
+    echo "  -m, --mode     Installation mode: tui, gui, both (default: tui)"
    echo "  -h, --help     Display this help message"
    exit 0
}
@@ -82,9 +83,13 @@ main() {
        -h|--help)
            usage
            ;;
-        --with-gui)
-            INSTALL_GUI=true
-            shift
+        -m|--mode)
+            if [ -n "$2" ]; then
+                MODE="$2"
+                shift 2
+            else
+                print_error "Error: --mode requires an argument (tui|gui|both)."
+            fi
            ;;
        *)
            print_error "Unknown parameter passed: $1"; usage
@@ -92,6 +97,26 @@ main() {
        esac
    done

+    case "$MODE" in
+        tui|gui|both) ;;
+        *)
+            print_error "Invalid mode: $MODE. Use 'tui', 'gui', or 'both'."
+            ;;
+    esac
+
+    DISPLAY_DETECTED=false
+    if [ -n "${DISPLAY:-}" ] || [ -n "${WAYLAND_DISPLAY:-}" ]; then
+        DISPLAY_DETECTED=true
+    fi
+
+    if [[ "$MODE" == "gui" || "$MODE" == "both" ]]; then
+        if [ "$DISPLAY_DETECTED" = true ]; then
+            INSTALL_GUI=true
+        else
+            print_warning "No display detected. Skipping GUI installation."
+        fi
+    fi
+
    # 1. Detect OS
    OS_NAME=$(uname -s)
    print_info "Installing SeedPass from branch: '$BRANCH'"
@@ -172,14 +197,15 @@ main() {
        fi
    fi
    if [ "$GUI_READY" = true ]; then
-        pip install -e .[gui]
-        print_info "Installing platform-specific Toga backend..."
        if [ "$OS_NAME" = "Linux" ]; then
-            print_info "Installing toga-gtk for Linux..."
-            pip install toga-gtk
+            print_info "Installing Linux GUI dependencies..."
+            pip install -e ".[gui-gtk]"
        elif [ "$OS_NAME" = "Darwin" ]; then
-            print_info "Installing toga-cocoa for macOS..."
-            pip install toga-cocoa
+            print_info "Installing macOS GUI dependencies..."
+            pip install -e ".[gui-mac]"
+        else
+            print_warning "Unsupported OS for GUI installation. Installing core package only."
+            pip install -e .
        fi
    else
        print_warning "Skipping GUI installation."
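Given the new option parsing above, a local invocation of the installer would presumably look like the following (sketch only; the `-b`/`-m` flags come from this diff, while running the script from a checked-out `scripts/install.sh` is an assumption):

```bash
# Install the TUI only (default mode)
./scripts/install.sh -b beta

# Install the TUI and the GUI backend when a display is available
./scripts/install.sh -b beta -m both
```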
@@ -34,13 +34,9 @@ def initialize_app() -> None:
    """Ensure the application directory exists."""
    try:
        APP_DIR.mkdir(exist_ok=True, parents=True)
-        if logger.isEnabledFor(logging.DEBUG):
-            logger.info(f"Application directory created at {APP_DIR}")
+        logger.debug("Application directory created at %s", APP_DIR)
    except Exception as exc:
-        if logger.isEnabledFor(logging.DEBUG):
-            logger.error(
-                f"Failed to create application directory: {exc}", exc_info=True
-            )
+        logger.error("Failed to create application directory: %s", exc, exc_info=True)


# -----------------------------------

169 src/main.py
@@ -19,7 +19,7 @@ from termcolor import colored
from utils.color_scheme import color_text
import importlib

-from seedpass.core.manager import PasswordManager
+from seedpass.core.manager import PasswordManager, restore_backup_index
from nostr.client import NostrClient
from seedpass.core.entry_types import EntryType
from seedpass.core.config_manager import ConfigManager
@@ -38,7 +38,11 @@ from utils import (
)
from utils.clipboard import ClipboardUnavailableError
from utils.atomic_write import atomic_write
-import queue
+from utils.logging_utils import (
+    ConsolePauseFilter,
+    ChecksumWarningFilter,
+    pause_logging_for_ui,
+)
from local_bip85.bip85 import Bip85Error


@@ -57,7 +61,7 @@ def _warn_missing_optional_dependencies() -> None:
    try:
        importlib.import_module(module)
    except ModuleNotFoundError:
-        logging.warning(
+        logging.debug(
            "Optional dependency '%s' is not installed; %s will be unavailable.",
            module,
            feature,
@@ -77,43 +81,39 @@ def load_global_config() -> dict:
    return {}


-def configure_logging():
-    logger = logging.getLogger()
-    logger.setLevel(logging.DEBUG)  # Keep this as DEBUG to capture all logs
-
-    # Remove all handlers associated with the root logger object
-    for handler in logger.handlers[:]:
-        logger.removeHandler(handler)
-
-    # Ensure the 'logs' directory exists
+def configure_logging() -> None:
+    """Configure application-wide logging handlers."""
    log_directory = Path("logs")
-    if not log_directory.exists():
-        log_directory.mkdir(parents=True, exist_ok=True)
+    log_directory.mkdir(parents=True, exist_ok=True)

-    # Create handlers
-    c_handler = logging.StreamHandler(sys.stdout)
-    f_handler = logging.FileHandler(log_directory / "main.log")
+    console_handler = logging.StreamHandler(sys.stderr)
+    console_handler.setLevel(logging.WARNING)
+    console_handler.addFilter(ConsolePauseFilter())
+    console_handler.addFilter(ChecksumWarningFilter())

-    # Set levels: only errors and critical messages will be shown in the console
-    c_handler.setLevel(logging.ERROR)
-    f_handler.setLevel(logging.DEBUG)
+    file_handler = logging.FileHandler(log_directory / "main.log")
+    file_handler.setLevel(logging.DEBUG)

-    # Create formatters and add them to handlers
    formatter = logging.Formatter(
-        "%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]"
+        "%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]",
    )
-    c_handler.setFormatter(formatter)
-    f_handler.setFormatter(formatter)
+    console_handler.setFormatter(formatter)
+    file_handler.setFormatter(formatter)

-    # Add handlers to the logger
-    logger.addHandler(c_handler)
-    logger.addHandler(f_handler)
+    root_logger = logging.getLogger()
+    root_logger.setLevel(logging.DEBUG)
+    root_logger.handlers.clear()
+    root_logger.addHandler(console_handler)
+    root_logger.addHandler(file_handler)

-    # Set logging level for third-party libraries to WARNING to suppress their debug logs
-    logging.getLogger("monstr").setLevel(logging.WARNING)
-    logging.getLogger("nostr").setLevel(logging.WARNING)
+    logging.captureWarnings(True)
+    logging.getLogger("monstr").setLevel(logging.ERROR)
+    logging.getLogger("nostr").setLevel(logging.ERROR)


+@pause_logging_for_ui
def confirm_action(prompt: str) -> bool:
    """
    Prompts the user for confirmation.
@@ -162,6 +162,7 @@ def get_notification_text(pm: PasswordManager) -> str:
    return color_text(getattr(note, "message", ""), category)


+@pause_logging_for_ui
def handle_switch_fingerprint(password_manager: PasswordManager):
    """
    Handles switching the active fingerprint.
@@ -670,33 +671,49 @@ handle_set_inactivity_timeout(password_manager: PasswordManager) -> None:


def handle_set_kdf_iterations(password_manager: PasswordManager) -> None:
-    """Change the PBKDF2 iteration count."""
+    """Interactive slider for PBKDF2 iteration strength with benchmarking."""
+    import hashlib
+    import time

    cfg_mgr = password_manager.config_manager
    if cfg_mgr is None:
        print(colored("Configuration manager unavailable.", "red"))
        return
+    levels = [
+        ("1", "Very Fast", 10_000),
+        ("2", "Fast", 50_000),
+        ("3", "Balanced", 100_000),
+        ("4", "Slow", 200_000),
+        ("5", "Paranoid", 500_000),
+    ]
    try:
        current = cfg_mgr.get_kdf_iterations()
-        print(colored(f"Current iterations: {current}", "cyan"))
    except Exception as e:
        logging.error(f"Error loading iterations: {e}")
        print(colored(f"Error: {e}", "red"))
        return
-    value = input("Enter new iteration count: ").strip()
-    if not value:
-        print(colored("No iteration count entered.", "yellow"))
+    print(colored(f"Current iterations: {current}", "cyan"))
+    for key, label, iters in levels:
+        marker = "*" if iters == current else " "
+        print(colored(f"{key}. {label} ({iters}) {marker}", "menu"))
+    print(colored("b. Benchmark current setting", "menu"))
+    choice = input("Select strength or 'b' to benchmark: ").strip().lower()
+    if not choice:
+        print(colored("No change made.", "yellow"))
+        return
+    if choice == "b":
+        start = time.perf_counter()
+        hashlib.pbkdf2_hmac("sha256", b"bench", b"salt", current)
+        elapsed = time.perf_counter() - start
+        print(colored(f"{current} iterations took {elapsed:.2f}s", "green"))
+        return
+    selected = {k: v for k, _, v in levels}.get(choice)
+    if not selected:
+        print(colored("Invalid choice.", "red"))
        return
    try:
-        iterations = int(value)
-        if iterations <= 0:
-            print(colored("Iterations must be positive.", "red"))
-            return
-    except ValueError:
-        print(colored("Invalid number.", "red"))
-        return
-    try:
-        cfg_mgr.set_kdf_iterations(iterations)
-        print(colored("KDF iteration count updated.", "green"))
+        cfg_mgr.set_kdf_iterations(selected)
+        print(colored(f"KDF iteration count set to {selected}.", "green"))
    except Exception as e:
        logging.error(f"Error saving iterations: {e}")
        print(colored(f"Error: {e}", "red"))
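The benchmark branch above times one PBKDF2-HMAC-SHA256 derivation at the configured iteration count. A standalone sketch of the same measurement (the `b"bench"` / `b"salt"` inputs mirror the diff; the iteration values printed here are illustrative only):

```python
import hashlib
import time

def benchmark_pbkdf2(iterations: int) -> float:
    """Return the wall-clock seconds one PBKDF2-HMAC-SHA256 derivation takes."""
    start = time.perf_counter()
    hashlib.pbkdf2_hmac("sha256", b"bench", b"salt", iterations)
    return time.perf_counter() - start

for iters in (10_000, 100_000, 500_000):
    print(f"{iters:>7} iterations: {benchmark_pbkdf2(iters):.3f}s")
```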
@@ -1014,12 +1031,12 @@ def handle_settings(password_manager: PasswordManager) -> None:
        print(color_text("8. Import database", "menu"))
        print(color_text("9. Export 2FA codes", "menu"))
        print(color_text("10. Set additional backup location", "menu"))
-        print(color_text("11. Set KDF iterations", "menu"))
+        print(color_text("11. KDF strength & benchmark", "menu"))
        print(color_text("12. Set inactivity timeout", "menu"))
        print(color_text("13. Lock Vault", "menu"))
        print(color_text("14. Stats", "menu"))
        print(color_text("15. Toggle Secret Mode", "menu"))
-        print(color_text("16. Toggle Offline Mode", "menu"))
+        print(color_text("16. Toggle Offline Mode (default ON)", "menu"))
        print(color_text("17. Toggle Quick Unlock", "menu"))
        choice = input("Select an option or press Enter to go back: ").strip()
        if choice == "1":
@@ -1285,11 +1302,20 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> int:
    load_global_config()
    parser = argparse.ArgumentParser()
    parser.add_argument("--fingerprint")
+    parser.add_argument(
+        "--restore-backup",
+        help="Restore index from backup file before starting",
+    )
    parser.add_argument(
        "--no-clipboard",
        action="store_true",
        help="Disable clipboard support and print secrets",
    )
+    parser.add_argument(
+        "--deterministic-totp",
+        action="store_true",
+        help="Derive TOTP secrets deterministically",
+    )
    parser.add_argument(
        "--max-prompt-attempts",
        type=int,
@@ -1300,6 +1326,11 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> int:

    exp = sub.add_parser("export")
    exp.add_argument("--file")
+    exp.add_argument(
+        "--unencrypted",
+        action="store_true",
+        help="Export without encryption",
+    )
    imp = sub.add_parser("import")
    imp.add_argument("--file")
@@ -1315,6 +1346,41 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> int:

    args = parser.parse_args(argv)

+    if args.restore_backup:
+        fp_target = args.fingerprint or fingerprint
+        if fp_target is None:
+            print(
+                colored(
+                    "Error: --fingerprint is required when using --restore-backup.",
+                    "red",
+                )
+            )
+            return 1
+        try:
+            restore_backup_index(Path(args.restore_backup), fp_target)
+            logger.info("Restored backup from %s", args.restore_backup)
+        except Exception as e:
+            logger.error(f"Failed to restore backup: {e}", exc_info=True)
+            print(colored(f"Error: Failed to restore backup: {e}", "red"))
+            return 1
+    elif args.command is None:
+        print("Startup Options:")
+        print("1. Continue")
+        print("2. Restore from backup")
+        choice = input("Select an option: ").strip()
+        if choice == "2":
+            path = input("Enter backup file path: ").strip()
+            fp_target = args.fingerprint or fingerprint
+            if fp_target is None:
+                fp_target = input("Enter fingerprint for restore: ").strip()
+            try:
+                restore_backup_index(Path(path), fp_target)
+                logger.info("Restored backup from %s", path)
+            except Exception as e:
+                logger.error(f"Failed to restore backup: {e}", exc_info=True)
+                print(colored(f"Error: Failed to restore backup: {e}", "red"))
+                return 1
+
    if args.max_prompt_attempts is not None:
        os.environ["SEEDPASS_MAX_PROMPT_ATTEMPTS"] = str(args.max_prompt_attempts)

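Based on the flags wired up above, a non-interactive restore before launch would presumably be invoked along these lines (the fingerprint and backup path below are placeholders, not values from the repository):

```bash
# Restore a vault index from a backup file before the TUI starts
python src/main.py --fingerprint 0123abcd --restore-backup ~/backups/seedpass_index_backup.enc
```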
@@ -1332,9 +1398,13 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> int:

    if args.no_clipboard:
        password_manager.secret_mode_enabled = False
+    if args.deterministic_totp:
+        password_manager.deterministic_totp = True

    if args.command == "export":
-        password_manager.handle_export_database(Path(args.file))
+        password_manager.handle_export_database(
+            Path(args.file), encrypt=not args.unencrypted
+        )
        return 0
    if args.command == "import":
        password_manager.handle_import_database(Path(args.file))
@@ -1376,9 +1446,10 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> int:
    if entry.get("type") != EntryType.TOTP.value:
        print(colored("Entry is not a TOTP entry.", "red"))
        return 1
-    code = password_manager.entry_manager.get_totp_code(
-        idx, password_manager.parent_seed
+    key = getattr(password_manager, "KEY_TOTP_DET", None) or getattr(
+        password_manager, "parent_seed", None
    )
+    code = password_manager.entry_manager.get_totp_code(idx, key)
    print(code)
    try:
        if copy_to_clipboard(code, password_manager.clipboard_clear_delay):
@@ -25,3 +25,4 @@ class Manifest:
    algo: str
    chunks: List[ChunkMeta]
    delta_since: Optional[int] = None
+    nonce: Optional[str] = None
@@ -33,7 +33,7 @@ from .backup_models import (
)
from .connection import ConnectionHandler, DEFAULT_RELAYS
from .key_manager import KeyManager as SeedPassKeyManager
-from .snapshot import MANIFEST_ID_PREFIX, SnapshotHandler, prepare_snapshot
+from .snapshot import SnapshotHandler, prepare_snapshot

if TYPE_CHECKING:  # pragma: no cover - imported for type hints
    from seedpass.core.config_manager import ConfigManager
@@ -57,6 +57,8 @@ class NostrClient(ConnectionHandler, SnapshotHandler):
        parent_seed: Optional[str] = None,
        offline_mode: bool = False,
        config_manager: Optional["ConfigManager"] = None,
+        key_index: bytes | None = None,
+        account_index: int | None = None,
    ) -> None:
        self.encryption_manager = encryption_manager
        self.fingerprint = fingerprint
@@ -68,7 +70,7 @@ class NostrClient(ConnectionHandler, SnapshotHandler):
            parent_seed = self.encryption_manager.decrypt_parent_seed()

        # Use our project's KeyManager to derive the private key
-        self.key_manager = KeyManager(parent_seed, fingerprint)
+        self.key_manager = KeyManager(parent_seed, fingerprint, account_index)

        # Create a nostr-sdk Keys object from our derived private key
        private_key_hex = self.key_manager.keys.private_key_hex()
@@ -99,6 +101,7 @@ class NostrClient(ConnectionHandler, SnapshotHandler):
        self.current_manifest: Manifest | None = None
        self.current_manifest_id: str | None = None
        self._delta_events: list[str] = []
+        self.key_index = key_index or b""

        # Configure and initialize the nostr-sdk Client
        signer = NostrSigner.keys(self.keys)
@@ -111,5 +114,4 @@ __all__ = [
    "NostrClient",
    "prepare_snapshot",
    "DEFAULT_RELAYS",
-    "MANIFEST_ID_PREFIX",
]
@@ -16,17 +16,22 @@ logger = logging.getLogger(__name__)


class KeyManager:
-    """
-    Manages key generation, encoding, and derivation for NostrClient.
-    """
+    """Manages key generation, encoding, and derivation for ``NostrClient``."""

-    def __init__(self, parent_seed: str, fingerprint: str):
-        """
-        Initializes the KeyManager with the provided parent_seed and fingerprint.
+    def __init__(
+        self, parent_seed: str, fingerprint: str, account_index: int | None = None
+    ):
+        """Initialize the key manager.

-        Parameters:
-            parent_seed (str): The parent seed used for key derivation.
-            fingerprint (str): The fingerprint to differentiate key derivations.
+        Parameters
+        ----------
+        parent_seed:
+            The BIP-39 seed used as the root for derivations.
+        fingerprint:
+            Seed profile fingerprint used for legacy derivations and logging.
+        account_index:
+            Optional explicit index for BIP-85 Nostr key derivation. When ``None``
+            the index defaults to ``0``.
        """
        try:
            if not isinstance(parent_seed, str):
@@ -40,12 +45,15 @@ class KeyManager:

            self.parent_seed = parent_seed
            self.fingerprint = fingerprint
-            logger.debug(f"KeyManager initialized with parent_seed and fingerprint.")
+            self.account_index = account_index
+            logger.debug(
+                "KeyManager initialized with parent_seed, fingerprint and account index."
+            )

            # Initialize BIP85
            self.bip85 = self.initialize_bip85()

-            # Generate Nostr keys using the fingerprint
+            # Generate Nostr keys using the provided account index
            self.keys = self.generate_nostr_keys()
            logger.debug("Nostr Keys initialized successfully.")

@@ -70,34 +78,36 @@ class KeyManager:
            raise

    def generate_nostr_keys(self) -> Keys:
-        """
-        Derives a unique Nostr key pair for the given fingerprint using BIP-85.
-
-        Returns:
-            Keys: An instance of Keys containing the Nostr key pair.
-        """
+        """Derive a Nostr key pair using the configured ``account_index``."""
        try:
-            # Convert fingerprint to an integer index (using a hash function)
-            index = int(hashlib.sha256(self.fingerprint.encode()).hexdigest(), 16) % (
-                2**31
-            )
-
-            # Derive entropy for Nostr key (32 bytes)
+            index = self.account_index if self.account_index is not None else 0
            entropy_bytes = self.bip85.derive_entropy(
-                index=index,
-                entropy_bytes=32,
-                app_no=NOSTR_KEY_APP_ID,
+                index=index, entropy_bytes=32, app_no=NOSTR_KEY_APP_ID
            )

-            # Generate Nostr key pair from entropy
            private_key_hex = entropy_bytes.hex()
            keys = Keys(priv_k=private_key_hex)
-            logger.debug(f"Nostr keys generated for fingerprint {self.fingerprint}.")
+            logger.debug("Nostr keys generated for account index %s", index)
            return keys
        except Exception as e:
            logger.error(f"Failed to generate Nostr keys: {e}", exc_info=True)
            raise

+    def generate_v1_nostr_keys(self) -> Keys:
+        """Derive keys using the legacy fingerprint-hash method."""
+        try:
+            index = int(hashlib.sha256(self.fingerprint.encode()).hexdigest(), 16) % (
+                2**31
+            )
+            entropy_bytes = self.bip85.derive_entropy(
+                index=index, entropy_bytes=32, app_no=NOSTR_KEY_APP_ID
+            )
+            return Keys(priv_k=entropy_bytes.hex())
+        except Exception as e:
+            logger.error(f"Failed to generate v1 Nostr keys: {e}", exc_info=True)
+            raise
+
    def generate_legacy_nostr_keys(self) -> Keys:
        """Derive Nostr keys using the legacy application ID."""
        try:
@@ -2,8 +2,10 @@ import asyncio
import base64
import gzip
import hashlib
+import hmac
import json
import logging
+import os
import time
from datetime import timedelta
from typing import Tuple
@@ -23,9 +25,6 @@ from .backup_models import (
logger = logging.getLogger("nostr.client")
logger.setLevel(logging.WARNING)

-# Identifier prefix for replaceable manifest events
-MANIFEST_ID_PREFIX = "seedpass-manifest-"


def prepare_snapshot(
    encrypted_bytes: bytes, limit: int
@@ -47,6 +46,19 @@ def prepare_snapshot(
    return manifest, chunks


+def new_manifest_id(key_index: bytes) -> tuple[str, bytes]:
+    """Return a new manifest identifier and nonce.
+
+    The identifier is computed as HMAC-SHA256 of ``b"manifest|" + nonce``
+    using ``key_index`` as the HMAC key. The nonce is returned so it can be
+    embedded inside the manifest itself.
+    """
+
+    nonce = os.urandom(16)
+    digest = hmac.new(key_index, b"manifest|" + nonce, hashlib.sha256).hexdigest()
+    return digest, nonce
+
+
class SnapshotHandler:
    """Mixin providing chunk and manifest handling."""

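Because the nonce is stored inside the manifest, anyone holding `key_index` can recompute the identifier and compare it with the `d` tag on a fetched event. A minimal sketch of that check (the verification helper itself is not part of the diff; names here are illustrative):

```python
import hashlib
import hmac

def manifest_id_from_nonce(key_index: bytes, nonce: bytes) -> str:
    # Same construction as new_manifest_id(), but with a known nonce.
    return hmac.new(key_index, b"manifest|" + nonce, hashlib.sha256).hexdigest()

def identifier_matches(key_index: bytes, nonce: bytes, identifier: str) -> bool:
    # Constant-time comparison against the identifier found on the event.
    return hmac.compare_digest(manifest_id_from_nonce(key_index, nonce), identifier)
```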
@@ -84,34 +96,43 @@ class SnapshotHandler:
        except Exception:
            meta.event_id = None

+        if (
+            self.current_manifest_id
+            and self.current_manifest
+            and getattr(self.current_manifest, "nonce", None)
+        ):
+            manifest_id = self.current_manifest_id
+            manifest.nonce = self.current_manifest.nonce
+        else:
+            manifest_id, nonce = new_manifest_id(self.key_index)
+            manifest.nonce = base64.b64encode(nonce).decode("utf-8")
+
        manifest_json = json.dumps(
            {
                "ver": manifest.ver,
                "algo": manifest.algo,
                "chunks": [meta.__dict__ for meta in manifest.chunks],
                "delta_since": manifest.delta_since,
+                "nonce": manifest.nonce,
            }
        )

-        manifest_identifier = (
-            self.current_manifest_id or f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
-        )
        manifest_event = (
            nostr_client.EventBuilder(nostr_client.Kind(KIND_MANIFEST), manifest_json)
-            .tags([nostr_client.Tag.identifier(manifest_identifier)])
+            .tags([nostr_client.Tag.identifier(manifest_id)])
            .build(self.keys.public_key())
            .sign_with_keys(self.keys)
        )
        await self.client.send_event(manifest_event)
        with self._state_lock:
            self.current_manifest = manifest
-            self.current_manifest_id = manifest_identifier
+            self.current_manifest_id = manifest_id
            self.current_manifest.delta_since = int(time.time())
            self._delta_events = []
        if getattr(self, "verbose_timing", False):
            duration = time.perf_counter() - start
            logger.info("publish_snapshot completed in %.2f seconds", duration)
-        return manifest, manifest_identifier
+        return manifest, manifest_id

    async def _fetch_chunks_with_retry(
        self, manifest_event
@@ -129,6 +150,7 @@ class SnapshotHandler:
                if data.get("delta_since") is not None
                else None
            ),
+            nonce=data.get("nonce"),
        )
    except Exception:
        return None
@@ -204,14 +226,11 @@ class SnapshotHandler:
        pubkey = self.keys.public_key()
        timeout = timedelta(seconds=10)

-        ident = f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
-        f = (
-            nostr_client.Filter()
-            .author(pubkey)
-            .kind(nostr_client.Kind(KIND_MANIFEST))
-            .identifier(ident)
-            .limit(1)
-        )
+        ident = self.current_manifest_id
+        f = nostr_client.Filter().author(pubkey).kind(nostr_client.Kind(KIND_MANIFEST))
+        if ident:
+            f = f.identifier(ident)
+        f = f.limit(1)
        try:
            events = (await self.client.fetch_events(f, timeout)).to_vec()
        except Exception as e:  # pragma: no cover - network errors
@@ -223,13 +242,11 @@ class SnapshotHandler:
            )
            return None

-        if not events:
-            ident = MANIFEST_ID_PREFIX.rstrip("-")
+        if not events and ident:
            f = (
                nostr_client.Filter()
                .author(pubkey)
                .kind(nostr_client.Kind(KIND_MANIFEST))
-                .identifier(ident)
                .limit(1)
            )
            try:
@@ -245,8 +262,6 @@ class SnapshotHandler:
        if not events:
            return None

-        logger.info("Fetched manifest using identifier %s", ident)
-
        for manifest_event in events:
            try:
                result = await self._fetch_chunks_with_retry(manifest_event)
@@ -300,7 +315,9 @@ class SnapshotHandler:
            return
        await self._connect_async()
        pubkey = self.keys.public_key()
-        ident = self.current_manifest_id or f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
+        ident = self.current_manifest_id
+        if ident is None:
+            return
        f = (
            nostr_client.Filter()
            .author(pubkey)
@@ -358,6 +375,7 @@ class SnapshotHandler:
                    meta.__dict__ for meta in self.current_manifest.chunks
                ],
                "delta_since": self.current_manifest.delta_since,
+                "nonce": self.current_manifest.nonce,
            }
        )
        manifest_event = (
@@ -9,8 +9,6 @@ import secrets
import queue
from typing import Any, List, Optional

-from datetime import datetime, timedelta, timezone
-import jwt
import logging

from fastapi import FastAPI, Header, HTTPException, Request, Response
@@ -18,8 +16,8 @@ from fastapi.concurrency import run_in_threadpool
import asyncio
import sys
from fastapi.middleware.cors import CORSMiddleware
-import hashlib
-import hmac
+import bcrypt

from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
@@ -50,16 +48,9 @@ def _get_pm(request: Request) -> PasswordManager:
def _check_token(request: Request, auth: str | None) -> None:
    if auth is None or not auth.startswith("Bearer "):
        raise HTTPException(status_code=401, detail="Unauthorized")
-    token = auth.split(" ", 1)[1]
-    jwt_secret = getattr(request.app.state, "jwt_secret", "")
-    token_hash = getattr(request.app.state, "token_hash", "")
-    try:
-        jwt.decode(token, jwt_secret, algorithms=["HS256"])
-    except jwt.ExpiredSignatureError:
-        raise HTTPException(status_code=401, detail="Token expired")
-    except jwt.InvalidTokenError:
-        raise HTTPException(status_code=401, detail="Unauthorized")
-    if not hmac.compare_digest(hashlib.sha256(token.encode()).hexdigest(), token_hash):
+    token = auth.split(" ", 1)[1].encode()
+    token_hash = getattr(request.app.state, "token_hash", b"")
+    if not token_hash or not bcrypt.checkpw(token, token_hash):
        raise HTTPException(status_code=401, detail="Unauthorized")


@@ -78,7 +69,7 @@ def _reload_relays(request: Request, relays: list[str]) -> None:


def start_server(fingerprint: str | None = None) -> str:
-    """Initialize global state and return a short-lived JWT token.
+    """Initialize global state and return a random API token.

    Parameters
    ----------
@@ -90,10 +81,8 @@ def start_server(fingerprint: str | None = None) -> str:
    else:
        pm = PasswordManager(fingerprint=fingerprint)
    app.state.pm = pm
-    app.state.jwt_secret = secrets.token_urlsafe(32)
-    payload = {"exp": datetime.now(timezone.utc) + timedelta(minutes=5)}
-    raw_token = jwt.encode(payload, app.state.jwt_secret, algorithm="HS256")
-    app.state.token_hash = hashlib.sha256(raw_token.encode()).hexdigest()
+    raw_token = secrets.token_urlsafe(32)
+    app.state.token_hash = bcrypt.hashpw(raw_token.encode(), bcrypt.gensalt())
    if not getattr(app.state, "limiter", None):
        app.state.limiter = limiter
        app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
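The token scheme above replaces the short-lived JWT with a random bearer token whose bcrypt hash is the only thing the server retains. A self-contained sketch of the issue/verify cycle (standalone bcrypt usage, outside the FastAPI app state shown in the diff):

```python
import secrets
import bcrypt

# Issue: generate a random token and keep only its bcrypt hash server-side.
raw_token = secrets.token_urlsafe(32)
token_hash = bcrypt.hashpw(raw_token.encode(), bcrypt.gensalt())

# Verify: a request presenting the raw token passes checkpw against the stored hash.
assert bcrypt.checkpw(raw_token.encode(), token_hash)
assert not bcrypt.checkpw(b"wrong-token", token_hash)
```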
@@ -214,13 +203,14 @@ async def create_entry(
        uri = await run_in_threadpool(
            pm.entry_manager.add_totp,
            entry.get("label"),
-            pm.parent_seed,
+            pm.KEY_TOTP_DET if entry.get("deterministic", False) else None,
            secret=entry.get("secret"),
            index=entry.get("index"),
            period=int(entry.get("period", 30)),
            digits=int(entry.get("digits", 6)),
            notes=entry.get("notes", ""),
            archived=entry.get("archived", False),
+            deterministic=entry.get("deterministic", False),
        )
        return {"id": index, "uri": uri}

@@ -464,7 +454,8 @@ def export_totp(
    _check_token(request, authorization)
    _require_password(request, password)
    pm = _get_pm(request)
-    return pm.entry_manager.export_totp_entries(pm.parent_seed)
+    key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
+    return pm.entry_manager.export_totp_entries(key)


@app.get("/api/v1/totp")
@@ -478,11 +469,12 @@ def get_totp_codes(
    _require_password(request, password)
    pm = _get_pm(request)
    entries = pm.entry_manager.list_entries(
-        filter_kind=EntryType.TOTP.value, include_archived=False
+        filter_kinds=[EntryType.TOTP.value], include_archived=False
    )
    codes = []
    for idx, label, _u, _url, _arch in entries:
-        code = pm.entry_manager.get_totp_code(idx, pm.parent_seed)
+        key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
+        code = pm.entry_manager.get_totp_code(idx, key)

        rem = pm.entry_manager.get_totp_time_remaining(idx)

@@ -9,6 +9,7 @@ from typing import Optional
import typer

from .common import _get_services
+from seedpass.core.errors import SeedPassError

app = typer.Typer(
    help="SeedPass command line interface",
@@ -30,6 +31,13 @@ no_clipboard_option = typer.Option(
    is_flag=True,
)

+deterministic_totp_option = typer.Option(
+    False,
+    "--deterministic-totp",
+    help="Derive TOTP secrets deterministically",
+    is_flag=True,
+)
+
# Sub command groups
from . import entry, vault, nostr, config, fingerprint, util, api

@@ -42,6 +50,15 @@ app.add_typer(util.app, name="util")
app.add_typer(api.app, name="api")


+def run() -> None:
+    """Invoke the CLI, handling SeedPass errors gracefully."""
+    try:
+        app()
+    except SeedPassError as exc:
+        typer.echo(str(exc), err=True)
+        raise typer.Exit(1) from exc
+
+
def _gui_backend_available() -> bool:
    """Return True if a platform-specific BeeWare backend is installed."""
    for pkg in ("toga_gtk", "toga_winforms", "toga_cocoa"):
@@ -55,12 +72,17 @@ def main(
    ctx: typer.Context,
    fingerprint: Optional[str] = fingerprint_option,
    no_clipboard: bool = no_clipboard_option,
+    deterministic_totp: bool = deterministic_totp_option,
) -> None:
    """SeedPass CLI entry point.

    When called without a subcommand this launches the interactive TUI.
    """
-    ctx.obj = {"fingerprint": fingerprint, "no_clipboard": no_clipboard}
+    ctx.obj = {
+        "fingerprint": fingerprint,
+        "no_clipboard": no_clipboard,
+        "deterministic_totp": deterministic_totp,
+    }
    if ctx.invoked_subcommand is None:
        tui = importlib.import_module("main")
        raise typer.Exit(tui.main(fingerprint=fingerprint))
@@ -161,4 +183,4 @@ def gui(


if __name__ == "__main__":  # pragma: no cover
-    app()
+    run()
@@ -13,19 +13,25 @@ app = typer.Typer(help="Run the API server")
def api_start(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
    """Start the SeedPass API server."""
    token = api_module.start_server(ctx.obj.get("fingerprint"))
-    typer.echo(f"API token: {token}")
+    typer.echo(
+        f"API token: {token}\nWARNING: Store this token securely; it cannot be recovered."
+    )
    uvicorn.run(api_module.app, host=host, port=port)


@app.command("stop")
-def api_stop(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
+def api_stop(
+    token: str = typer.Option(..., help="API token"),
+    host: str = "127.0.0.1",
+    port: int = 8000,
+) -> None:
    """Stop the SeedPass API server."""
    import requests

    try:
        requests.post(
            f"http://{host}:{port}/api/v1/shutdown",
-            headers={"Authorization": f"Bearer {api_module.app.state.token_hash}"},
+            headers={"Authorization": f"Bearer {token}"},
            timeout=2,
        )
    except Exception as exc:  # pragma: no cover - best effort
@@ -29,6 +29,8 @@ def _get_pm(ctx: typer.Context) -> PasswordManager:
    pm = PasswordManager(fingerprint=fp)
    if ctx.obj.get("no_clipboard"):
        pm.secret_mode_enabled = False
+    if ctx.obj.get("deterministic_totp"):
+        pm.deterministic_totp = True
    return pm

@@ -6,8 +6,10 @@ from pathlib import Path
from typing import List, Optional

import typer
+import click

from .common import _get_entry_service, EntryType
+from seedpass.core.entry_types import ALL_ENTRY_TYPES
from utils.clipboard import ClipboardUnavailableError


@@ -20,13 +22,20 @@ def entry_list(
    sort: str = typer.Option(
        "index", "--sort", help="Sort by 'index', 'label', or 'updated'"
    ),
-    kind: Optional[str] = typer.Option(None, "--kind", help="Filter by entry type"),
+    kind: Optional[str] = typer.Option(
+        None,
+        "--kind",
+        help="Filter by entry type",
+        click_type=click.Choice(ALL_ENTRY_TYPES),
+    ),
    archived: bool = typer.Option(False, "--archived", help="Include archived"),
) -> None:
    """List entries in the vault."""
    service = _get_entry_service(ctx)
    entries = service.list_entries(
-        sort_by=sort, filter_kind=kind, include_archived=archived
+        sort_by=sort,
+        filter_kinds=[kind] if kind else None,
+        include_archived=archived,
    )
    for idx, label, username, url, is_archived in entries:
        line = f"{idx}: {label}"
@@ -43,16 +52,17 @@ def entry_list(
def entry_search(
    ctx: typer.Context,
    query: str,
-    kind: List[str] = typer.Option(
+    kinds: List[str] = typer.Option(
        None,
        "--kind",
        "-k",
        help="Filter by entry kinds (can be repeated)",
+        click_type=click.Choice(ALL_ENTRY_TYPES),
    ),
) -> None:
    """Search entries."""
    service = _get_entry_service(ctx)
-    kinds = list(kind) if kind else None
+    kinds = list(kinds) if kinds else None
    results = service.search_entries(query, kinds=kinds)
    if not results:
        typer.echo("No matching entries found")
@@ -167,6 +177,9 @@ def entry_add_totp(
    secret: Optional[str] = typer.Option(None, "--secret", help="Import secret"),
    period: int = typer.Option(30, "--period", help="TOTP period in seconds"),
    digits: int = typer.Option(6, "--digits", help="Number of TOTP digits"),
+    deterministic_totp: bool = typer.Option(
+        False, "--deterministic-totp", help="Derive secret deterministically"
+    ),
) -> None:
    """Add a TOTP entry and output the otpauth URI."""
    service = _get_entry_service(ctx)
@@ -176,6 +189,7 @@ def entry_add_totp(
        secret=secret,
        period=period,
        digits=digits,
+        deterministic=deterministic_totp,
    )
    typer.echo(uri)

@@ -265,13 +265,13 @@ class EntryService:
    def list_entries(
        self,
        sort_by: str = "index",
-        filter_kind: str | None = None,
+        filter_kinds: list[str] | None = None,
        include_archived: bool = False,
    ):
        with self._lock:
            return self._manager.entry_manager.list_entries(
                sort_by=sort_by,
-                filter_kind=filter_kind,
+                filter_kinds=filter_kinds,
                include_archived=include_archived,
            )

@@ -305,9 +305,10 @@ class EntryService:

    def get_totp_code(self, entry_id: int) -> str:
        with self._lock:
-            return self._manager.entry_manager.get_totp_code(
-                entry_id, self._manager.parent_seed
+            key = getattr(self._manager, "KEY_TOTP_DET", None) or getattr(
+                self._manager, "parent_seed", None
            )
+            return self._manager.entry_manager.get_totp_code(entry_id, key)

    def add_entry(
        self,
@@ -362,15 +363,18 @@ class EntryService:
        secret: str | None = None,
        period: int = 30,
        digits: int = 6,
+        deterministic: bool = False,
    ) -> str:
        with self._lock:
+            key = self._manager.KEY_TOTP_DET if deterministic else None
            uri = self._manager.entry_manager.add_totp(
                label,
-                self._manager.parent_seed,
+                key,
                index=index,
                secret=secret,
                period=period,
                digits=digits,
+                deterministic=deterministic,
            )
            self._manager.start_background_vault_sync()
            return uri
@@ -515,9 +519,10 @@ class EntryService:

    def export_totp_entries(self) -> dict:
        with self._lock:
-            return self._manager.entry_manager.export_totp_entries(
-                self._manager.parent_seed
+            key = getattr(self._manager, "KEY_TOTP_DET", None) or getattr(
+                self._manager, "parent_seed", None
            )
+            return self._manager.entry_manager.export_totp_entries(key)

    def display_totp_codes(self) -> None:
        with self._lock:
@@ -145,6 +145,28 @@ class BackupManager:
            )
        )

+    def restore_from_backup(self, backup_path: str) -> None:
+        """Restore the index file from a user-specified backup path."""
+        try:
+            src = Path(backup_path)
+            if not src.exists():
+                logger.error(f"Backup file '{src}' does not exist.")
+                print(colored(f"Error: Backup file '{src}' does not exist.", "red"))
+                return
+            shutil.copy2(src, self.index_file)
+            os.chmod(self.index_file, 0o600)
+            logger.info(f"Index file restored from backup '{src}'.")
+            print(colored(f"[+] Index file restored from backup '{src}'.", "green"))
+        except Exception as e:
+            logger.error(
+                f"Failed to restore from backup '{backup_path}': {e}", exc_info=True
+            )
+            print(
+                colored(
+                    f"Error: Failed to restore from backup '{backup_path}': {e}", "red"
+                )
+            )
+
    def list_backups(self) -> None:
        try:
            backup_files = sorted(
@@ -41,7 +41,7 @@ class ConfigManager:
            logger.info("Config file not found; returning defaults")
            return {
                "relays": list(DEFAULT_NOSTR_RELAYS),
-                "offline_mode": False,
+                "offline_mode": True,
                "pin_hash": "",
                "password_hash": "",
                "inactivity_timeout": INACTIVITY_TIMEOUT,
@@ -71,7 +71,7 @@ class ConfigManager:
            raise ValueError("Config data must be a dictionary")
        # Ensure defaults for missing keys
        data.setdefault("relays", list(DEFAULT_NOSTR_RELAYS))
-        data.setdefault("offline_mode", False)
+        data.setdefault("offline_mode", True)
        data.setdefault("pin_hash", "")
        data.setdefault("password_hash", "")
        data.setdefault("inactivity_timeout", INACTIVITY_TIMEOUT)
@@ -243,7 +243,7 @@ class ConfigManager:
    def get_offline_mode(self) -> bool:
        """Retrieve the offline mode setting."""
        config = self.load_config(require_pin=False)
-        return bool(config.get("offline_mode", False))
+        return bool(config.get("offline_mode", True))

    def set_clipboard_clear_delay(self, delay: int) -> None:
        """Persist clipboard clear timeout in seconds."""
@@ -16,8 +16,10 @@ except Exception:  # pragma: no cover - fallback for environments without orjson
 import hashlib
 import os
 import base64
+import zlib
+from dataclasses import asdict
 from pathlib import Path
-from typing import Optional
+from typing import Optional, Tuple

 from cryptography.hazmat.primitives.ciphers.aead import AESGCM
 from cryptography.exceptions import InvalidTag
@@ -26,6 +28,8 @@ from termcolor import colored
 from utils.file_lock import exclusive_lock
 from mnemonic import Mnemonic
 from utils.password_prompt import prompt_existing_password
+from utils.key_derivation import KdfConfig, CURRENT_KDF_VERSION
+from .errors import DecryptionError

 # Instantiate the logger
 logger = logging.getLogger(__name__)
@@ -89,16 +93,23 @@ class EncryptionManager:
        # Track user preference for handling legacy indexes
        self._legacy_migrate_flag = True
        self.last_migration_performed = False
+        # Track nonces to detect accidental reuse
+        self.nonce_crc_table: set[int] = set()

    def encrypt_data(self, data: bytes) -> bytes:
        """
-        (2) Encrypts data using the NEW AES-GCM format, prepending a version
-        header and the nonce. All new data will be in this format.
+        Encrypt data using AES-GCM, emitting ``b"V3|" + nonce + ciphertext + tag``.
+        A fresh 96-bit nonce is generated for each call and tracked via a CRC
+        table to detect accidental reuse during batch operations.
        """
        try:
            nonce = os.urandom(12)  # 96-bit nonce is recommended for AES-GCM
+            crc = zlib.crc32(nonce)
+            if crc in self.nonce_crc_table:
+                raise ValueError("Nonce reuse detected")
+            self.nonce_crc_table.add(crc)
            ciphertext = self.cipher.encrypt(nonce, data, None)
-            return b"V2:" + nonce + ciphertext
+            return b"V3|" + nonce + ciphertext
        except Exception as e:
            logger.error(f"Failed to encrypt data: {e}", exc_info=True)
            raise
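As an aside, here is a minimal standalone sketch of the `V3|` layout that the new `encrypt_data` emits, using only what the hunk above shows (a 3-byte prefix, a 12-byte nonce, and the AES-GCM ciphertext with its 16-byte tag, plus a CRC set guarding against nonce reuse). The key handling below is illustrative only, not SeedPass's own key derivation.

```python
# Sketch of the V3 payload layout; assumes a throwaway AES-GCM key for the demo.
import os
import zlib
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
cipher = AESGCM(key)
nonce_crc_table: set[int] = set()

def encrypt_v3(data: bytes) -> bytes:
    nonce = os.urandom(12)                 # 96-bit nonce, as in the diff
    crc = zlib.crc32(nonce)
    if crc in nonce_crc_table:             # accidental-reuse guard
        raise ValueError("Nonce reuse detected")
    nonce_crc_table.add(crc)
    return b"V3|" + nonce + cipher.encrypt(nonce, data, None)

blob = encrypt_v3(b"hello")
assert blob.startswith(b"V3|")
nonce, ciphertext = blob[3:15], blob[15:]  # ciphertext includes the 16-byte GCM tag
assert cipher.decrypt(nonce, ciphertext, None) == b"hello"
```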
@@ -120,14 +131,33 @@ class EncryptionManager:
        ctx = f" {context}" if context else ""

        try:
-            # Try the new V2 format first
+            # Try the new V3 format first
+            if encrypted_data.startswith(b"V3|"):
+                try:
+                    nonce = encrypted_data[3:15]
+                    ciphertext = encrypted_data[15:]
+                    if len(ciphertext) < 16:
+                        logger.error("AES-GCM payload too short")
+                        raise DecryptionError(
+                            f"Failed to decrypt{ctx}: AES-GCM payload too short"
+                        )
+                    return self.cipher.decrypt(nonce, ciphertext, None)
+                except InvalidTag as e:
+                    logger.error(f"Failed to decrypt{ctx}: invalid key or corrupt file")
+                    raise DecryptionError(
+                        f"Failed to decrypt{ctx}: invalid key or corrupt file"
+                    ) from e
+
+            # Next try the older V2 format
            if encrypted_data.startswith(b"V2:"):
                try:
                    nonce = encrypted_data[3:15]
                    ciphertext = encrypted_data[15:]
                    if len(ciphertext) < 16:
                        logger.error("AES-GCM payload too short")
-                        raise InvalidToken("AES-GCM payload too short")
+                        raise DecryptionError(
+                            f"Failed to decrypt{ctx}: AES-GCM payload too short"
+                        )
                    return self.cipher.decrypt(nonce, ciphertext, None)
                except InvalidTag as e:
                    logger.debug(
@@ -140,31 +170,38 @@ class EncryptionManager:
                    )
                    return result
                except InvalidToken:
-                    msg = f"Failed to decrypt{ctx}: invalid key or corrupt file"
-                    logger.error(msg)
-                    raise InvalidToken(msg) from e
+                    logger.error(
+                        f"Failed to decrypt{ctx}: invalid key or corrupt file"
+                    )
+                    raise DecryptionError(
+                        f"Failed to decrypt{ctx}: invalid key or corrupt file"
+                    ) from e

-            # If it's not V2, it must be the legacy Fernet format
-            else:
-                logger.warning("Data is in legacy Fernet format. Attempting migration.")
-                try:
-                    return self.fernet.decrypt(encrypted_data)
-                except InvalidToken as e:
-                    logger.error(
-                        "Legacy Fernet decryption failed. Vault may be corrupt or key is incorrect."
-                    )
-                    raise e
+            # If it's neither V3 nor V2, assume legacy Fernet format
+            logger.warning("Data is in legacy Fernet format. Attempting migration.")
+            try:
+                return self.fernet.decrypt(encrypted_data)
+            except InvalidToken as e:
+                logger.error(
+                    "Legacy Fernet decryption failed. Vault may be corrupt or key is incorrect."
+                )
+                raise DecryptionError(
+                    f"Failed to decrypt{ctx}: invalid key or corrupt file"
+                ) from e

-        except (InvalidToken, InvalidTag) as e:
-            if encrypted_data.startswith(b"V2:"):
-                # Already determined not to be legacy; re-raise
-                raise
-            if isinstance(e, InvalidToken) and str(e) == "AES-GCM payload too short":
-                raise
-            if not self._legacy_migrate_flag:
+        except DecryptionError as e:
+            if (
+                encrypted_data.startswith(b"V3|")
+                or encrypted_data.startswith(b"V2:")
+                or not self._legacy_migrate_flag
+            ):
                raise
            logger.debug(f"Could not decrypt data{ctx}: {e}")
-            raise LegacyFormatRequiresMigrationError(context)
+            raise LegacyFormatRequiresMigrationError(context) from e
+        except (InvalidToken, InvalidTag) as e:  # pragma: no cover - safety net
+            raise DecryptionError(
+                f"Failed to decrypt{ctx}: invalid key or corrupt file"
+            ) from e

    def decrypt_legacy(
        self, encrypted_data: bytes, password: str, context: Optional[str] = None
@@ -201,8 +238,8 @@ class EncryptionManager:
            except Exception as e2:  # pragma: no cover - try next iteration
                last_exc = e2
        logger.error(f"Failed legacy decryption attempt: {last_exc}", exc_info=True)
-        raise InvalidToken(
-            f"Could not decrypt{ctx} with any available method."
+        raise DecryptionError(
+            f"Failed to decrypt{ctx}: invalid key or corrupt file"
        ) from last_exc

    # --- All functions below this point now use the smart `decrypt_data` method ---
@@ -231,40 +268,78 @@ class EncryptionManager:
            raise ValueError("Invalid path outside fingerprint directory")
        return candidate

-    def encrypt_parent_seed(self, parent_seed: str) -> None:
+    def encrypt_parent_seed(
+        self, parent_seed: str, kdf: Optional[KdfConfig] = None
+    ) -> None:
        """Encrypts and saves the parent seed to 'parent_seed.enc'."""
        data = parent_seed.encode("utf-8")
-        encrypted_data = self.encrypt_data(data)  # This now creates V2 format
-        with exclusive_lock(self.parent_seed_file) as fh:
-            fh.seek(0)
-            fh.truncate()
-            fh.write(encrypted_data)
-        os.chmod(self.parent_seed_file, 0o600)
+        self.encrypt_and_save_file(data, self.parent_seed_file, kdf=kdf)
        logger.info(f"Parent seed encrypted and saved to '{self.parent_seed_file}'.")

    def decrypt_parent_seed(self) -> str:
        """Decrypts and returns the parent seed, handling migration."""
        with exclusive_lock(self.parent_seed_file) as fh:
            fh.seek(0)
-            encrypted_data = fh.read()
+            blob = fh.read()

-        is_legacy = not encrypted_data.startswith(b"V2:")
+        kdf, encrypted_data = self._deserialize(blob)
+        is_legacy = not (
+            encrypted_data.startswith(b"V3|") or encrypted_data.startswith(b"V2:")
+        )
        decrypted_data = self.decrypt_data(encrypted_data, context="seed")

        if is_legacy:
-            logger.info("Parent seed was in legacy format. Re-encrypting to V2 format.")
-            self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip())
+            logger.info("Parent seed was in legacy format. Re-encrypting to V3 format.")
+            self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip(), kdf=kdf)

        return decrypted_data.decode("utf-8").strip()

-    def encrypt_and_save_file(self, data: bytes, relative_path: Path) -> None:
+    def _serialize(self, kdf: KdfConfig, ciphertext: bytes) -> bytes:
+        payload = {"kdf": asdict(kdf), "ct": base64.b64encode(ciphertext).decode()}
+        if USE_ORJSON:
+            return json_lib.dumps(payload)
+        return json_lib.dumps(payload, separators=(",", ":")).encode("utf-8")
+
+    def _deserialize(self, blob: bytes) -> Tuple[KdfConfig, bytes]:
+        """Return ``(KdfConfig, ciphertext)`` from serialized *blob*.
+
+        Legacy files stored the raw ciphertext without a JSON wrapper. If
+        decoding the wrapper fails, treat ``blob`` as the ciphertext and return
+        a default HKDF configuration.
+        """
+
+        try:
+            if USE_ORJSON:
+                obj = json_lib.loads(blob)
+            else:
+                obj = json_lib.loads(blob.decode("utf-8"))
+            kdf = KdfConfig(**obj.get("kdf", {}))
+            ct_b64 = obj.get("ct", "")
+            ciphertext = base64.b64decode(ct_b64)
+            if ciphertext:
+                return kdf, ciphertext
+        except Exception:  # pragma: no cover - fall back to legacy path
+            pass
+
+        # Legacy format: ``blob`` already contains the ciphertext
+        return (
+            KdfConfig(name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""),
+            blob,
+        )
+
+    def encrypt_and_save_file(
+        self, data: bytes, relative_path: Path, *, kdf: Optional[KdfConfig] = None
+    ) -> None:
+        if kdf is None:
+            kdf = KdfConfig()
        file_path = self.resolve_relative_path(relative_path)
        file_path.parent.mkdir(parents=True, exist_ok=True)
        encrypted_data = self.encrypt_data(data)
+        payload = self._serialize(kdf, encrypted_data)
        with exclusive_lock(file_path) as fh:
            fh.seek(0)
            fh.truncate()
-            fh.write(encrypted_data)
+            fh.write(payload)
            fh.flush()
            os.fsync(fh.fileno())
        os.chmod(file_path, 0o600)
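For orientation, a standalone sketch of the on-disk wrapper written by the new `_serialize`/`_deserialize` pair above: a small JSON object holding the KDF parameters plus the base64-encoded ciphertext, with a legacy fallback that treats the whole blob as ciphertext. The `KdfConfig` dataclass here is a stand-in mirroring the field names visible in the diff; the real one lives in `utils.key_derivation`, and its version constant is assumed.

```python
# Sketch only: stand-in KdfConfig and plain json instead of SeedPass's orjson wrapper.
import base64
import json
from dataclasses import asdict, dataclass, field

@dataclass
class KdfConfig:                       # assumption: mirrors the fields used in the diff
    name: str = "hkdf"
    version: int = 1                   # assumption: real code uses CURRENT_KDF_VERSION
    params: dict = field(default_factory=dict)
    salt_b64: str = ""

def serialize(kdf: KdfConfig, ciphertext: bytes) -> bytes:
    payload = {"kdf": asdict(kdf), "ct": base64.b64encode(ciphertext).decode()}
    return json.dumps(payload, separators=(",", ":")).encode("utf-8")

def deserialize(blob: bytes) -> tuple[KdfConfig, bytes]:
    try:
        obj = json.loads(blob.decode("utf-8"))
        kdf = KdfConfig(**obj.get("kdf", {}))
        ciphertext = base64.b64decode(obj.get("ct", ""))
        if ciphertext:
            return kdf, ciphertext
    except Exception:
        pass                            # legacy file: blob is already the ciphertext
    return KdfConfig(), blob

kdf, ct = deserialize(serialize(KdfConfig(), b"V3|example-ciphertext"))
assert ct == b"V3|example-ciphertext"
```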
@@ -273,20 +348,37 @@ class EncryptionManager:
        file_path = self.resolve_relative_path(relative_path)
        with exclusive_lock(file_path) as fh:
            fh.seek(0)
-            encrypted_data = fh.read()
+            blob = fh.read()
+        _, encrypted_data = self._deserialize(blob)
        return self.decrypt_data(encrypted_data, context=str(relative_path))

-    def save_json_data(self, data: dict, relative_path: Optional[Path] = None) -> None:
+    def get_file_kdf(self, relative_path: Path) -> KdfConfig:
+        file_path = self.resolve_relative_path(relative_path)
+        with exclusive_lock(file_path) as fh:
+            fh.seek(0)
+            blob = fh.read()
+        kdf, _ = self._deserialize(blob)
+        return kdf
+
+    def save_json_data(
+        self,
+        data: dict,
+        relative_path: Optional[Path] = None,
+        *,
+        kdf: Optional[KdfConfig] = None,
+    ) -> None:
        if relative_path is None:
            relative_path = Path("seedpass_entries_db.json.enc")
        if USE_ORJSON:
            json_data = json_lib.dumps(data)
        else:
            json_data = json_lib.dumps(data, separators=(",", ":")).encode("utf-8")
-        self.encrypt_and_save_file(json_data, relative_path)
+        self.encrypt_and_save_file(json_data, relative_path, kdf=kdf)
        logger.debug(f"JSON data encrypted and saved to '{relative_path}'.")

-    def load_json_data(self, relative_path: Optional[Path] = None) -> dict:
+    def load_json_data(
+        self, relative_path: Optional[Path] = None, *, return_kdf: bool = False
+    ) -> dict | Tuple[dict, KdfConfig]:
        """
        Loads and decrypts JSON data, automatically migrating and re-saving
        if it's in the legacy format.
@@ -295,13 +387,21 @@ class EncryptionManager:
            relative_path = Path("seedpass_entries_db.json.enc")
        file_path = self.resolve_relative_path(relative_path)
        if not file_path.exists():
-            return {"entries": {}}
+            empty: dict = {"entries": {}}
+            if return_kdf:
+                return empty, KdfConfig(
+                    name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""
+                )
+            return empty

        with exclusive_lock(file_path) as fh:
            fh.seek(0)
-            encrypted_data = fh.read()
+            blob = fh.read()

-        is_legacy = not encrypted_data.startswith(b"V2:")
+        kdf, encrypted_data = self._deserialize(blob)
+        is_legacy = not (
+            encrypted_data.startswith(b"V3|") or encrypted_data.startswith(b"V2:")
+        )
        self.last_migration_performed = False

        try:
@@ -316,16 +416,26 @@ class EncryptionManager:
            # If it was a legacy file, re-save it in the new format now
            if is_legacy and self._legacy_migrate_flag:
                logger.info(f"Migrating and re-saving legacy vault file: {file_path}")
-                self.save_json_data(data, relative_path)
+                self.save_json_data(data, relative_path, kdf=kdf)
                self.update_checksum(relative_path)
                self.last_migration_performed = True

+            if return_kdf:
+                return data, kdf
            return data
-        except (InvalidToken, InvalidTag, JSONDecodeError) as e:
-            logger.error(
-                f"FATAL: Could not decrypt or parse data from {file_path}: {e}",
-                exc_info=True,
-            )
+        except DecryptionError as e:
+            msg = f"Failed to decrypt or parse data from {file_path}: {e}"
+            logger.error(msg)
+            raise
+        except (InvalidToken, InvalidTag) as e:  # pragma: no cover - legacy safety
+            msg = f"Failed to decrypt or parse data from {file_path}: {e}"
+            logger.error(msg)
+            raise DecryptionError(
+                f"Failed to decrypt {file_path}: invalid key or corrupt file"
+            ) from e
+        except JSONDecodeError as e:
+            msg = f"Failed to parse JSON data from {file_path}: {e}"
+            logger.error(msg)
            raise

    def get_encrypted_index(self) -> Optional[bytes]:
@@ -360,7 +470,8 @@ class EncryptionManager:
        if relative_path is None:
            relative_path = Path("seedpass_entries_db.json.enc")

-        is_legacy = not encrypted_data.startswith(b"V2:")
+        kdf, ciphertext = self._deserialize(encrypted_data)
+        is_legacy = not (ciphertext.startswith(b"V3|") or ciphertext.startswith(b"V2:"))
        self.last_migration_performed = False

        def _process(decrypted: bytes) -> dict:
@@ -386,25 +497,23 @@ class EncryptionManager:
            return data

        try:
-            decrypted_data = self.decrypt_data(
-                encrypted_data, context=str(relative_path)
-            )
+            decrypted_data = self.decrypt_data(ciphertext, context=str(relative_path))
            data = _process(decrypted_data)
-            self.save_json_data(data, relative_path)  # This always saves in V2 format
+            self.save_json_data(data, relative_path, kdf=kdf)
            self.update_checksum(relative_path)
            logger.info("Index file from Nostr was processed and saved successfully.")
            self.last_migration_performed = is_legacy
            return True
-        except (InvalidToken, LegacyFormatRequiresMigrationError):
+        except (DecryptionError, LegacyFormatRequiresMigrationError):
            try:
                password = prompt_existing_password(
                    "Enter your master password for legacy decryption: "
                )
                decrypted_data = self.decrypt_legacy(
-                    encrypted_data, password, context=str(relative_path)
+                    ciphertext, password, context=str(relative_path)
                )
                data = _process(decrypted_data)
-                self.save_json_data(data, relative_path)
+                self.save_json_data(data, relative_path, kdf=kdf)
                self.update_checksum(relative_path)
                logger.warning(
                    "Index decrypted using legacy password-only key derivation."
@@ -25,7 +25,6 @@ except Exception:  # pragma: no cover - fallback when orjson is missing
 USE_ORJSON = False
 import logging
 import hashlib
-import sys
 import shutil
 import time
 from typing import Optional, Tuple, Dict, Any, List
@@ -33,8 +32,8 @@ from pathlib import Path

 from termcolor import colored
 from .migrations import LATEST_VERSION
-from .entry_types import EntryType
-from .totp import TotpManager
+from .entry_types import EntryType, ALL_ENTRY_TYPES
+from .totp import TotpManager, random_totp_secret
 from utils.fingerprint import generate_fingerprint
 from utils.checksum import canonical_json_dumps
 from utils.atomic_write import atomic_write
@@ -48,6 +47,7 @@ from utils.key_validation import (

 from .vault import Vault
 from .backup import BackupManager
+from .errors import SeedPassError


 # Instantiate the logger
@@ -148,7 +148,7 @@ class EntryManager:
        except Exception as e:
            logger.error(f"Error determining next index: {e}", exc_info=True)
            print(colored(f"Error determining next index: {e}", "red"))
-            sys.exit(1)
+            raise SeedPassError(f"Error determining next index: {e}") from e

    def add_entry(
        self,
@@ -238,7 +238,7 @@ class EntryManager:
        except Exception as e:
            logger.error(f"Failed to add entry: {e}", exc_info=True)
            print(colored(f"Error: Failed to add entry: {e}", "red"))
-            sys.exit(1)
+            raise SeedPassError(f"Failed to add entry: {e}") from e

    def get_next_totp_index(self) -> int:
        """Return the next available derivation index for TOTP secrets."""
@@ -257,7 +257,7 @@ class EntryManager:
    def add_totp(
        self,
        label: str,
-        parent_seed: str,
+        parent_seed: str | bytes | None = None,
        *,
        archived: bool = False,
        secret: str | None = None,
@@ -266,13 +266,16 @@ class EntryManager:
        digits: int = 6,
        notes: str = "",
        tags: list[str] | None = None,
+        deterministic: bool = False,
    ) -> str:
        """Add a new TOTP entry and return the provisioning URI."""
        entry_id = self.get_next_index()
        data = self._load_index()
        data.setdefault("entries", {})

-        if secret is None:
+        if deterministic:
+            if parent_seed is None:
+                raise ValueError("Seed required for deterministic TOTP")
            if index is None:
                index = self.get_next_totp_index()
            secret = TotpManager.derive_secret(parent_seed, index)
@@ -289,8 +292,11 @@ class EntryManager:
                "archived": archived,
                "notes": notes,
                "tags": tags or [],
+                "deterministic": True,
            }
        else:
+            if secret is None:
+                secret = random_totp_secret()
            if not validate_totp_secret(secret):
                raise ValueError("Invalid TOTP secret")
            entry = {
@@ -304,6 +310,7 @@ class EntryManager:
                "archived": archived,
                "notes": notes,
                "tags": tags or [],
+                "deterministic": False,
            }

        data["entries"][str(entry_id)] = entry
@@ -689,7 +696,10 @@ class EntryManager:
        return derive_seed_phrase(bip85, seed_index, words)

    def get_totp_code(
-        self, index: int, parent_seed: str | None = None, timestamp: int | None = None
+        self,
+        index: int,
+        parent_seed: str | bytes | None = None,
+        timestamp: int | None = None,
    ) -> str:
        """Return the current TOTP code for the specified entry."""
        entry = self.retrieve_entry(index)
@@ -699,12 +709,12 @@ class EntryManager:
            etype != EntryType.TOTP.value and kind != EntryType.TOTP.value
        ):
            raise ValueError("Entry is not a TOTP entry")
-        if "secret" in entry:
-            return TotpManager.current_code_from_secret(entry["secret"], timestamp)
-        if parent_seed is None:
-            raise ValueError("Seed required for derived TOTP")
-        totp_index = int(entry.get("index", 0))
-        return TotpManager.current_code(parent_seed, totp_index, timestamp)
+        if entry.get("deterministic", False) or "secret" not in entry:
+            if parent_seed is None:
+                raise ValueError("Seed required for derived TOTP")
+            totp_index = int(entry.get("index", 0))
+            return TotpManager.current_code(parent_seed, totp_index, timestamp)
+        return TotpManager.current_code_from_secret(entry["secret"], timestamp)

    def get_totp_time_remaining(self, index: int) -> int:
        """Return seconds remaining in the TOTP period for the given entry."""
@@ -719,7 +729,9 @@ class EntryManager:
        period = int(entry.get("period", 30))
        return TotpManager.time_remaining(period)

-    def export_totp_entries(self, parent_seed: str) -> dict[str, list[dict[str, Any]]]:
+    def export_totp_entries(
+        self, parent_seed: str | bytes | None
+    ) -> dict[str, list[dict[str, Any]]]:
        """Return all TOTP secrets and metadata for external use."""
        data = self._load_index()
        entries = data.get("entries", {})
@@ -731,11 +743,13 @@ class EntryManager:
            label = entry.get("label", "")
            period = int(entry.get("period", 30))
            digits = int(entry.get("digits", 6))
-            if "secret" in entry:
-                secret = entry["secret"]
-            else:
+            if entry.get("deterministic", False) or "secret" not in entry:
+                if parent_seed is None:
+                    raise ValueError("Seed required for deterministic TOTP export")
                idx = int(entry.get("index", 0))
                secret = TotpManager.derive_secret(parent_seed, idx)
+            else:
+                secret = entry["secret"]
            uri = TotpManager.make_otpauth_uri(label, secret, period, digits)
            exported.append(
                {
@@ -1076,7 +1090,7 @@ class EntryManager:
    def list_entries(
        self,
        sort_by: str = "index",
-        filter_kind: str | None = None,
+        filter_kinds: list[str] | None = None,
        *,
        include_archived: bool = False,
        verbose: bool = True,
@@ -1088,8 +1102,9 @@ class EntryManager:
        sort_by:
            Field to sort by. Supported values are ``"index"``, ``"label"`` and
            ``"updated"``.
-        filter_kind:
-            Optional entry kind to restrict the results.
+        filter_kinds:
+            Optional list of entry kinds to restrict the results. Defaults to
+            ``ALL_ENTRY_TYPES``.

        Archived entries are omitted unless ``include_archived`` is ``True``.
        """
@@ -1118,12 +1133,14 @@ class EntryManager:

        sorted_items = sorted(entries_data.items(), key=sort_key)

+        if filter_kinds is None:
+            filter_kinds = ALL_ENTRY_TYPES
+
        filtered_items: List[Tuple[int, Dict[str, Any]]] = []
        for idx_str, entry in sorted_items:
            if (
-                filter_kind is not None
-                and entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
-                != filter_kind
+                entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
+                not in filter_kinds
            ):
                continue
            if not include_archived and entry.get(
@@ -1371,7 +1388,7 @@ class EntryManager:
    def list_all_entries(
        self,
        sort_by: str = "index",
-        filter_kind: str | None = None,
+        filter_kinds: list[str] | None = None,
        *,
        include_archived: bool = False,
    ) -> None:
@@ -1379,7 +1396,7 @@ class EntryManager:
        try:
            entries = self.list_entries(
                sort_by=sort_by,
-                filter_kind=filter_kind,
+                filter_kinds=filter_kinds,
                include_archived=include_archived,
            )
            if not entries:
@@ -1403,7 +1420,7 @@ class EntryManager:

    def get_entry_summaries(
        self,
-        filter_kind: str | None = None,
+        filter_kinds: list[str] | None = None,
        *,
        include_archived: bool = False,
    ) -> list[tuple[int, str, str]]:
@@ -1412,10 +1429,13 @@ class EntryManager:
        data = self._load_index()
        entries_data = data.get("entries", {})

+        if filter_kinds is None:
+            filter_kinds = ALL_ENTRY_TYPES
+
        summaries: list[tuple[int, str, str]] = []
        for idx_str, entry in entries_data.items():
            etype = entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
-            if filter_kind and etype != filter_kind:
+            if etype not in filter_kinds:
                continue
            if not include_archived and entry.get(
                "archived", entry.get("blacklisted", False)
@@ -15,3 +15,7 @@ class EntryType(str, Enum):
     NOSTR = "nostr"
     KEY_VALUE = "key_value"
     MANAGED_ACCOUNT = "managed_account"
+
+
+# List of all entry type values for convenience
+ALL_ENTRY_TYPES = [e.value for e in EntryType]
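To illustrate how the new `ALL_ENTRY_TYPES` list feeds the `filter_kinds` parameters introduced above, here is a toy sketch; the enum members are abbreviated and the entry dicts are made up, not the full SeedPass data model.

```python
# Toy sketch of filter_kinds defaulting to ALL_ENTRY_TYPES.
from enum import Enum

class EntryType(str, Enum):
    PASSWORD = "password"
    TOTP = "totp"
    MANAGED_ACCOUNT = "managed_account"

ALL_ENTRY_TYPES = [e.value for e in EntryType]

entries = [
    {"type": "password", "label": "example.com"},   # assumption: sample data
    {"type": "totp", "label": "example 2FA"},
]

def summaries(filter_kinds: list[str] | None = None) -> list[str]:
    if filter_kinds is None:              # same default the diff applies
        filter_kinds = ALL_ENTRY_TYPES
    return [e["label"] for e in entries if e["type"] in filter_kinds]

assert summaries() == ["example.com", "example 2FA"]
assert summaries([EntryType.TOTP.value]) == ["example 2FA"]
```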
src/seedpass/core/errors.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+"""Custom exceptions for SeedPass core modules.
+
+This module defines :class:`SeedPassError`, a base exception used across the
+core modules. Library code should raise this error instead of terminating the
+process with ``sys.exit`` so that callers can handle failures gracefully.
+
+When raised inside the CLI, :class:`SeedPassError` behaves like a Click
+exception, displaying a friendly message and exiting with code ``1``.
+"""
+
+from click import ClickException
+from cryptography.fernet import InvalidToken
+
+
+class SeedPassError(ClickException):
+    """Base exception for SeedPass-related errors."""
+
+    def __init__(self, message: str):
+        super().__init__(message)
+
+
+class DecryptionError(InvalidToken, SeedPassError):
+    """Raised when encrypted data cannot be decrypted.
+
+    Subclasses :class:`cryptography.fernet.InvalidToken` so callers expecting
+    the cryptography exception continue to work.
+    """
+
+
+__all__ = ["SeedPassError", "DecryptionError"]

[One file's diff is suppressed because it is too large.]
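A quick standalone check (a sketch, not SeedPass test code) that the dual inheritance declared in the new errors module behaves as its docstring promises: call sites that still catch cryptography's `InvalidToken` also catch `DecryptionError`.

```python
# Sketch: DecryptionError remains catchable as InvalidToken.
from click import ClickException
from cryptography.fernet import InvalidToken

class SeedPassError(ClickException):
    """Base exception for SeedPass-related errors."""

class DecryptionError(InvalidToken, SeedPassError):
    """Raised when encrypted data cannot be decrypted."""

try:
    raise DecryptionError("Failed to decrypt: invalid key or corrupt file")
except InvalidToken as exc:          # legacy call sites keep working
    print(f"caught as InvalidToken: {exc}")
```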
@@ -6,10 +6,11 @@ from typing import TYPE_CHECKING

 from termcolor import colored

-from .entry_types import EntryType
+from .entry_types import EntryType, ALL_ENTRY_TYPES
 import seedpass.core.manager as manager_module
 from utils.color_scheme import color_text
 from utils.terminal_utils import clear_header_with_notification
+from utils.logging_utils import pause_logging_for_ui

 if TYPE_CHECKING:  # pragma: no cover - typing only
     from .manager import PasswordManager
@@ -21,6 +22,7 @@ class MenuHandler:
    def __init__(self, manager: PasswordManager) -> None:
        self.manager = manager

+    @pause_logging_for_ui
    def handle_list_entries(self) -> None:
        """List entries and optionally show details."""
        pm = self.manager
@@ -36,33 +38,16 @@ class MenuHandler:
            )
            print(color_text("\nList Entries:", "menu"))
            print(color_text("1. All", "menu"))
-            print(color_text("2. Passwords", "menu"))
-            print(color_text("3. 2FA (TOTP)", "menu"))
-            print(color_text("4. SSH Key", "menu"))
-            print(color_text("5. Seed Phrase", "menu"))
-            print(color_text("6. Nostr Key Pair", "menu"))
-            print(color_text("7. PGP", "menu"))
-            print(color_text("8. Key/Value", "menu"))
-            print(color_text("9. Managed Account", "menu"))
+            option_map: dict[str, str] = {}
+            for i, etype in enumerate(ALL_ENTRY_TYPES, start=2):
+                label = etype.replace("_", " ").title()
+                print(color_text(f"{i}. {label}", "menu"))
+                option_map[str(i)] = etype
            choice = input("Select entry type or press Enter to go back: ").strip()
            if choice == "1":
-                filter_kind = None
-            elif choice == "2":
-                filter_kind = EntryType.PASSWORD.value
-            elif choice == "3":
-                filter_kind = EntryType.TOTP.value
-            elif choice == "4":
-                filter_kind = EntryType.SSH.value
-            elif choice == "5":
-                filter_kind = EntryType.SEED.value
-            elif choice == "6":
-                filter_kind = EntryType.NOSTR.value
-            elif choice == "7":
-                filter_kind = EntryType.PGP.value
-            elif choice == "8":
-                filter_kind = EntryType.KEY_VALUE.value
-            elif choice == "9":
-                filter_kind = EntryType.MANAGED_ACCOUNT.value
+                filter_kinds = None
+            elif choice in option_map:
+                filter_kinds = [option_map[choice]]
            elif not choice:
                return
            else:
@@ -71,7 +56,7 @@ class MenuHandler:

        while True:
            summaries = pm.entry_manager.get_entry_summaries(
-                filter_kind, include_archived=False
+                filter_kinds, include_archived=False
            )
            if not summaries:
                break
@@ -85,7 +70,7 @@ class MenuHandler:
            )
            print(colored("\n[+] Entries:\n", "green"))
            for idx, etype, label in summaries:
-                if filter_kind is None:
+                if filter_kinds is None:
                    display_type = etype.capitalize()
                    print(colored(f"{idx}. {display_type} - {label}", "cyan"))
                else:
@@ -103,6 +88,7 @@ class MenuHandler:
            logging.error(f"Failed to list entries: {e}", exc_info=True)
            print(colored(f"Error: Failed to list entries: {e}", "red"))

+    @pause_logging_for_ui
    def handle_display_totp_codes(self) -> None:
        """Display all stored TOTP codes with a countdown progress bar."""
        pm = self.manager
@@ -148,7 +134,10 @@ class MenuHandler:
            if generated:
                print(colored("\nGenerated 2FA Codes:", "green"))
                for label, idx, period, _ in generated:
-                    code = pm.entry_manager.get_totp_code(idx, pm.parent_seed)
+                    key = getattr(pm, "KEY_TOTP_DET", None) or getattr(
+                        pm, "parent_seed", None
+                    )
+                    code = pm.entry_manager.get_totp_code(idx, key)
                    remaining = pm.entry_manager.get_totp_time_remaining(idx)
                    filled = int(20 * (period - remaining) / period)
                    bar = "[" + "#" * filled + "-" * (20 - filled) + "]"
@@ -166,7 +155,10 @@ class MenuHandler:
            if imported_list:
                print(colored("\nImported 2FA Codes:", "green"))
                for label, idx, period, _ in imported_list:
-                    code = pm.entry_manager.get_totp_code(idx, pm.parent_seed)
+                    key = getattr(pm, "KEY_TOTP_DET", None) or getattr(
+                        pm, "parent_seed", None
+                    )
+                    code = pm.entry_manager.get_totp_code(idx, key)
                    remaining = pm.entry_manager.get_totp_time_remaining(idx)
                    filled = int(20 * (period - remaining) / period)
                    bar = "[" + "#" * filled + "-" * (20 - filled) + "]"
@@ -113,10 +113,12 @@ class PasswordGenerator:
            self.bip85 = bip85
            self.policy = policy or PasswordPolicy()

-            # Derive seed bytes from parent_seed using BIP39 (handled by EncryptionManager)
-            self.seed_bytes = self.encryption_manager.derive_seed_from_mnemonic(
-                self.parent_seed
-            )
+            if isinstance(parent_seed, (bytes, bytearray)):
+                self.seed_bytes = bytes(parent_seed)
+            else:
+                self.seed_bytes = self.encryption_manager.derive_seed_from_mnemonic(
+                    self.parent_seed
+                )

            logger.debug("PasswordGenerator initialized successfully.")
        except Exception as e:
@@ -21,6 +21,7 @@ from utils.key_derivation import (
 )
 from .encryption import EncryptionManager
 from utils.checksum import json_checksum, canonical_json_dumps
+from .state_manager import StateManager

 logger = logging.getLogger(__name__)

@@ -32,6 +33,7 @@ class PortableMode(Enum):
     """Encryption mode for portable exports."""

     SEED_ONLY = EncryptionMode.SEED_ONLY.value
+    NONE = "none"


 def _derive_export_key(seed: str) -> bytes:
@@ -47,8 +49,15 @@ def export_backup(
    *,
    publish: bool = False,
    parent_seed: str | None = None,
+    encrypt: bool = True,
 ) -> Path:
-    """Export the current vault state to a portable encrypted file."""
+    """Export the current vault state to a portable file.
+
+    When ``encrypt`` is ``True`` (the default) the payload is encrypted with a
+    key derived from the parent seed. When ``encrypt`` is ``False`` the payload
+    is written in plaintext and the wrapper records an ``encryption_mode`` of
+    :data:`PortableMode.NONE`.
+    """

    if dest_path is None:
        ts = int(time.time())
@@ -57,24 +66,32 @@ def export_backup(
        dest_path = dest_dir / EXPORT_NAME_TEMPLATE.format(ts=ts)

    index_data = vault.load_index()
-    seed = (
-        parent_seed
-        if parent_seed is not None
-        else vault.encryption_manager.decrypt_parent_seed()
-    )
-    key = _derive_export_key(seed)
-    enc_mgr = EncryptionManager(key, vault.fingerprint_dir)

    canonical = canonical_json_dumps(index_data)
-    payload_bytes = enc_mgr.encrypt_data(canonical.encode("utf-8"))
+    if encrypt:
+        seed = (
+            parent_seed
+            if parent_seed is not None
+            else vault.encryption_manager.decrypt_parent_seed()
+        )
+        key = _derive_export_key(seed)
+        enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
+        payload_bytes = enc_mgr.encrypt_data(canonical.encode("utf-8"))
+        mode = PortableMode.SEED_ONLY
+        cipher = "aes-gcm"
+    else:
+        payload_bytes = canonical.encode("utf-8")
+        mode = PortableMode.NONE
+        cipher = "none"
+
    checksum = json_checksum(index_data)

    wrapper = {
        "format_version": FORMAT_VERSION,
        "created_at": int(time.time()),
        "fingerprint": vault.fingerprint_dir.name,
-        "encryption_mode": PortableMode.SEED_ONLY.value,
-        "cipher": "aes-gcm",
+        "encryption_mode": mode.value,
+        "cipher": cipher,
        "checksum": checksum,
        "payload": base64.b64encode(payload_bytes).decode("utf-8"),
    }
@@ -90,10 +107,12 @@ def export_backup(
    enc_file.write_bytes(encrypted)
    os.chmod(enc_file, 0o600)
    try:
+        idx = StateManager(vault.fingerprint_dir).state.get("nostr_account_idx", 0)
        client = NostrClient(
            vault.encryption_manager,
            vault.fingerprint_dir.name,
            config_manager=backup_manager.config_manager,
+            account_index=idx,
        )
        asyncio.run(client.publish_snapshot(encrypted))
    except Exception:
@@ -118,19 +137,24 @@ def import_backup(
    if wrapper.get("format_version") != FORMAT_VERSION:
        raise ValueError("Unsupported backup format")

-    if wrapper.get("encryption_mode") != PortableMode.SEED_ONLY.value:
-        raise ValueError("Unsupported encryption mode")
+    mode = wrapper.get("encryption_mode")
    payload = base64.b64decode(wrapper["payload"])

-    seed = (
-        parent_seed
-        if parent_seed is not None
-        else vault.encryption_manager.decrypt_parent_seed()
-    )
-    key = _derive_export_key(seed)
-    enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
-    enc_mgr._legacy_migrate_flag = False
-    index_bytes = enc_mgr.decrypt_data(payload, context="backup payload")
+    if mode == PortableMode.SEED_ONLY.value:
+        seed = (
+            parent_seed
+            if parent_seed is not None
+            else vault.encryption_manager.decrypt_parent_seed()
+        )
+        key = _derive_export_key(seed)
+        enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
+        enc_mgr._legacy_migrate_flag = False
+        index_bytes = enc_mgr.decrypt_data(payload, context="backup payload")
+    elif mode == PortableMode.NONE.value:
+        index_bytes = payload
+    else:
+        raise ValueError("Unsupported encryption mode")

    index = json.loads(index_bytes.decode("utf-8"))

    checksum = json_checksum(index)
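For context, a rough sketch of the wrapper that `export_backup(..., encrypt=False)` writes and that `import_backup` accepts for the new `none` mode. `FORMAT_VERSION`, the fingerprint value, and the checksum helper here are placeholders, not the real SeedPass constants or `json_checksum`.

```python
# Sketch of the plaintext (encrypt=False) portable wrapper and its round trip.
import base64
import hashlib
import json
import time

FORMAT_VERSION = 1                                  # assumption: placeholder
index_data = {"entries": {}}                        # assumption: toy vault index
canonical = json.dumps(index_data, sort_keys=True, separators=(",", ":"))

wrapper = {
    "format_version": FORMAT_VERSION,
    "created_at": int(time.time()),
    "fingerprint": "example-fingerprint",           # assumption: placeholder
    "encryption_mode": "none",                      # PortableMode.NONE in the diff
    "cipher": "none",
    "checksum": hashlib.sha256(canonical.encode()).hexdigest(),  # stand-in checksum
    "payload": base64.b64encode(canonical.encode("utf-8")).decode("utf-8"),
}

# Import side: mode "none" means the payload is the plaintext index.
restored = json.loads(base64.b64decode(wrapper["payload"]).decode("utf-8"))
assert restored == index_data
```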
@@ -6,7 +6,6 @@ from typing import Optional, TYPE_CHECKING
 from termcolor import colored

 import seedpass.core.manager as manager_module
-from nostr.snapshot import MANIFEST_ID_PREFIX

 from utils.password_prompt import prompt_existing_password

@@ -44,7 +43,7 @@ class ProfileService:
        pm.fingerprint_manager.current_fingerprint = selected_fingerprint
        pm.current_fingerprint = selected_fingerprint
        if not getattr(pm, "manifest_id", None):
-            pm.manifest_id = f"{MANIFEST_ID_PREFIX}{selected_fingerprint}"
+            pm.manifest_id = None

        pm.fingerprint_dir = pm.fingerprint_manager.get_current_fingerprint_dir()
        if not pm.fingerprint_dir:
@@ -77,6 +76,8 @@ class ProfileService:
            fingerprint=pm.current_fingerprint,
            config_manager=getattr(pm, "config_manager", None),
            parent_seed=getattr(pm, "parent_seed", None),
+            key_index=pm.KEY_INDEX,
+            account_index=pm.nostr_account_idx,
        )
        if getattr(pm, "manifest_id", None) and hasattr(
            pm.nostr_client, "_state_lock"
@@ -26,6 +26,7 @@ class StateManager:
            "manifest_id": None,
            "delta_since": 0,
            "relays": list(DEFAULT_RELAYS),
+            "nostr_account_idx": 0,
        }
        with shared_lock(self.state_path) as fh:
            fh.seek(0)
@@ -37,6 +38,7 @@ class StateManager:
                "manifest_id": None,
                "delta_since": 0,
                "relays": list(DEFAULT_RELAYS),
+                "nostr_account_idx": 0,
            }
        try:
            obj = json.loads(data.decode())
@@ -47,6 +49,7 @@ class StateManager:
        obj.setdefault("manifest_id", None)
        obj.setdefault("delta_since", 0)
        obj.setdefault("relays", list(DEFAULT_RELAYS))
+        obj.setdefault("nostr_account_idx", 0)
        return obj

    def _save(self, data: dict) -> None:
@@ -2,8 +2,11 @@

 from __future__ import annotations

+import os
 import sys
 import time
+import base64
+from typing import Union
 from urllib.parse import quote
 from urllib.parse import urlparse, parse_qs, unquote

@@ -14,17 +17,24 @@ import pyotp
 from utils import key_derivation


+def random_totp_secret(length: int = 20) -> str:
+    """Return a random Base32 encoded TOTP secret."""
+    return base64.b32encode(os.urandom(length)).decode("ascii").rstrip("=")
+
+
 class TotpManager:
     """Helper methods for TOTP secrets and codes."""

     @staticmethod
-    def derive_secret(seed: str, index: int) -> str:
-        """Derive a TOTP secret from a BIP39 seed and index."""
+    def derive_secret(seed: Union[str, bytes], index: int) -> str:
+        """Derive a TOTP secret from a seed or raw key and index."""
        return key_derivation.derive_totp_secret(seed, index)

    @classmethod
-    def current_code(cls, seed: str, index: int, timestamp: int | None = None) -> str:
-        """Return the TOTP code for the given seed and index."""
+    def current_code(
+        cls, seed: Union[str, bytes], index: int, timestamp: int | None = None
+    ) -> str:
+        """Return the TOTP code for the given seed/key and index."""
        secret = cls.derive_secret(seed, index)
        totp = pyotp.TOTP(secret)
        if timestamp is None:
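A small standalone sketch of the new non-deterministic TOTP path added above: draw a Base32 secret from `os.urandom` exactly as `random_totp_secret` does, then turn it into a code with `pyotp`. The surrounding plumbing (entry storage, deterministic derivation) is omitted.

```python
# Sketch of the random (non-deterministic) TOTP secret path.
import base64
import os

import pyotp

def random_totp_secret(length: int = 20) -> str:
    """Return a random Base32 encoded TOTP secret."""
    return base64.b32encode(os.urandom(length)).decode("ascii").rstrip("=")

secret = random_totp_secret()
code = pyotp.TOTP(secret).now()      # 6-digit code for the current 30 s period
print(secret, code)
```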
@@ -14,6 +14,7 @@ from .encryption import (
     USE_ORJSON,
     json_lib,
 )
+from utils.key_derivation import KdfConfig, CURRENT_KDF_VERSION
 from utils.password_prompt import prompt_existing_password


@@ -38,6 +39,11 @@ class Vault:
        """Replace the internal encryption manager."""
        self.encryption_manager = manager

+    def _hkdf_kdf(self) -> KdfConfig:
+        return KdfConfig(
+            name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""
+        )
+
    # ----- Password index helpers -----
    def load_index(self, *, return_migration_flags: bool = False):
        """Return decrypted password index data, applying migrations.
@@ -102,10 +108,24 @@ class Vault:
        )

        try:
-            data = self.encryption_manager.load_json_data(self.index_file)
+            data, kdf = self.encryption_manager.load_json_data(
+                self.index_file, return_kdf=True
+            )
            migration_performed = getattr(
                self.encryption_manager, "last_migration_performed", False
            )
+            if kdf.version < CURRENT_KDF_VERSION:
+                new_kdf = KdfConfig(
+                    name=kdf.name,
+                    version=CURRENT_KDF_VERSION,
+                    params=kdf.params,
+                    salt_b64=kdf.salt_b64,
+                )
+                self.encryption_manager.save_json_data(
+                    data, self.index_file, kdf=new_kdf
+                )
+                self.encryption_manager.update_checksum(self.index_file)
+                migration_performed = True
        except LegacyFormatRequiresMigrationError:
            print(
                colored(
@@ -142,7 +162,9 @@ class Vault:
            else:
                data = json_lib.loads(decrypted.decode("utf-8"))
            if self.encryption_manager._legacy_migrate_flag:
-                self.encryption_manager.save_json_data(data, self.index_file)
+                self.encryption_manager.save_json_data(
+                    data, self.index_file, kdf=self._hkdf_kdf()
+                )
                self.encryption_manager.update_checksum(self.index_file)
            migration_performed = getattr(
                self.encryption_manager, "last_migration_performed", False
@@ -181,7 +203,9 @@ class Vault:
        try:
            data = apply_migrations(data)
            if schema_migrated:
-                self.encryption_manager.save_json_data(data, self.index_file)
+                self.encryption_manager.save_json_data(
+                    data, self.index_file, kdf=self._hkdf_kdf()
+                )
                self.encryption_manager.update_checksum(self.index_file)
        except Exception as exc:  # noqa: BLE001 - surface clear error and restore
            if legacy_detected and backup_dir is not None:
@@ -214,7 +238,9 @@ class Vault:

    def save_index(self, data: dict) -> None:
        """Encrypt and write password index."""
-        self.encryption_manager.save_json_data(data, self.index_file)
+        self.encryption_manager.save_json_data(
+            data, self.index_file, kdf=self._hkdf_kdf()
+        )

    def get_encrypted_index(self) -> Optional[bytes]:
        """Return the encrypted index bytes if present."""
@@ -252,4 +278,6 @@ class Vault:

    def save_config(self, config: dict) -> None:
        """Encrypt and persist configuration."""
-        self.encryption_manager.save_json_data(config, self.config_file)
+        self.encryption_manager.save_json_data(
+            config, self.config_file, kdf=self._hkdf_kdf()
+        )
@@ -1,4 +1,13 @@
-class VaultLockedError(Exception):
+"""Compatibility layer for historic exception types."""
+
+from .core.errors import SeedPassError
+
+
+class VaultLockedError(SeedPassError):
     """Raised when an operation requires an unlocked vault."""

-    pass
+    def __init__(self, message: str = "Vault is locked") -> None:
+        super().__init__(message)
+
+
+__all__ = ["VaultLockedError", "SeedPassError"]

@@ -393,7 +393,7 @@ class TotpViewerWindow(toga.Window):
     def refresh_codes(self) -> None:
         self.table.data = []
         for idx, label, *_rest in self.entries.list_entries(
-            filter_kind=EntryType.TOTP.value
+            filter_kinds=[EntryType.TOTP.value]
         ):
             entry = self.entries.retrieve_entry(idx)
             code = self.entries.get_totp_code(idx)

@@ -4,7 +4,7 @@ import sys

 import pytest
 from httpx import ASGITransport, AsyncClient
-import hashlib
+import bcrypt

 sys.path.append(str(Path(__file__).resolve().parents[1]))

@@ -54,7 +54,7 @@ async def client(monkeypatch):
 async def test_token_hashed(client):
     _, token = client
     assert api.app.state.token_hash != token
-    assert api.app.state.token_hash == hashlib.sha256(token.encode()).hexdigest()
+    assert bcrypt.checkpw(token.encode(), api.app.state.token_hash)


 @pytest.mark.anyio

@@ -53,6 +53,7 @@ async def test_create_and_modify_totp_entry(client):
         "digits": 8,
         "notes": "n",
         "archived": False,
+        "deterministic": False,
     }

     res = await cl.put(
@@ -377,7 +378,7 @@ async def test_vault_export_endpoint(client, tmp_path):
     out = tmp_path / "out.json"
     out.write_text("data")

-    api.app.state.pm.handle_export_database = lambda: out
+    api.app.state.pm.handle_export_database = lambda *a, **k: out

     headers = {
         "Authorization": f"Bearer {token}",

@@ -36,6 +36,7 @@ def test_audit_logger_records_events(monkeypatch, tmp_path):
     monkeypatch.setattr(manager_module, "export_backup", lambda *a, **k: dest)
     pm.vault = object()
     pm.backup_manager = object()
+    monkeypatch.setattr("seedpass.core.manager.confirm_action", lambda *_a, **_k: True)
     pm.handle_export_database(dest)

     confirms = iter([True, False])

@@ -20,6 +20,7 @@ def test_switch_fingerprint_triggers_bg_sync(monkeypatch, tmp_path):
     pm.current_fingerprint = None
     pm.encryption_manager = object()
     pm.config_manager = SimpleNamespace(get_quick_unlock=lambda: False)
+    pm.nostr_account_idx = 0

     monkeypatch.setattr("builtins.input", lambda *_a, **_k: "1")
     monkeypatch.setattr(
56
src/tests/test_backup_restore_startup.py
Normal file
56
src/tests/test_backup_restore_startup.py
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
import main
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_flag_restores_before_init(monkeypatch, tmp_path):
|
||||||
|
calls = []
|
||||||
|
backup = tmp_path / "bak.json"
|
||||||
|
backup.write_text("{}")
|
||||||
|
|
||||||
|
def fake_restore(path, fingerprint):
|
||||||
|
calls.append(("restore", Path(path), fingerprint))
|
||||||
|
|
||||||
|
class DummyPM:
|
||||||
|
def __init__(self, fingerprint=None):
|
||||||
|
calls.append(("init", fingerprint))
|
||||||
|
self.secret_mode_enabled = True
|
||||||
|
self.inactivity_timeout = 0
|
||||||
|
|
||||||
|
monkeypatch.setattr(main, "restore_backup_index", fake_restore)
|
||||||
|
monkeypatch.setattr(main, "PasswordManager", DummyPM)
|
||||||
|
monkeypatch.setattr(main, "display_menu", lambda pm, **k: None)
|
||||||
|
|
||||||
|
rc = main.main(["--fingerprint", "fp", "--restore-backup", str(backup)])
|
||||||
|
assert rc == 0
|
||||||
|
assert calls[0][0] == "restore"
|
||||||
|
assert calls[1][0] == "init"
|
||||||
|
assert calls[0][1] == backup
|
||||||
|
assert calls[0][2] == "fp"
|
||||||
|
|
||||||
|
|
||||||
|
def test_menu_option_restores_before_init(monkeypatch, tmp_path):
|
||||||
|
calls = []
|
||||||
|
backup = tmp_path / "bak.json"
|
||||||
|
backup.write_text("{}")
|
||||||
|
|
||||||
|
def fake_restore(path, fingerprint):
|
||||||
|
calls.append(("restore", Path(path), fingerprint))
|
||||||
|
|
||||||
|
class DummyPM:
|
||||||
|
def __init__(self, fingerprint=None):
|
||||||
|
calls.append(("init", fingerprint))
|
||||||
|
self.secret_mode_enabled = True
|
||||||
|
self.inactivity_timeout = 0
|
||||||
|
|
||||||
|
monkeypatch.setattr(main, "restore_backup_index", fake_restore)
|
||||||
|
monkeypatch.setattr(main, "PasswordManager", DummyPM)
|
||||||
|
monkeypatch.setattr(main, "display_menu", lambda pm, **k: None)
|
||||||
|
inputs = iter(["2", str(backup)])
|
||||||
|
monkeypatch.setattr("builtins.input", lambda _prompt="": next(inputs))
|
||||||
|
|
||||||
|
rc = main.main(["--fingerprint", "fp"])
|
||||||
|
assert rc == 0
|
||||||
|
assert calls[0][0] == "restore"
|
||||||
|
assert calls[1][0] == "init"
|
||||||
|
assert calls[0][1] == backup
|
||||||
|
assert calls[0][2] == "fp"
|
@@ -16,7 +16,7 @@ from seedpass.core.entry_types import EntryType
|
|||||||
class DummyPM:
|
class DummyPM:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.entry_manager = SimpleNamespace(
|
self.entry_manager = SimpleNamespace(
|
||||||
list_entries=lambda sort_by="index", filter_kind=None, include_archived=False: [
|
list_entries=lambda sort_by="index", filter_kinds=None, include_archived=False: [
|
||||||
(1, "Label", "user", "url", False)
|
(1, "Label", "user", "url", False)
|
||||||
],
|
],
|
||||||
search_entries=lambda q, kinds=None: [
|
search_entries=lambda q, kinds=None: [
|
||||||
@@ -25,7 +25,7 @@ class DummyPM:
|
|||||||
retrieve_entry=lambda idx: {"type": EntryType.PASSWORD.value, "length": 8},
|
retrieve_entry=lambda idx: {"type": EntryType.PASSWORD.value, "length": 8},
|
||||||
get_totp_code=lambda idx, seed: "123456",
|
get_totp_code=lambda idx, seed: "123456",
|
||||||
add_entry=lambda label, length, username, url, **kwargs: 1,
|
add_entry=lambda label, length, username, url, **kwargs: 1,
|
||||||
add_totp=lambda label, seed, index=None, secret=None, period=30, digits=6: "totp://",
|
add_totp=lambda label, seed, index=None, secret=None, period=30, digits=6, deterministic=False: "totp://",
|
||||||
add_ssh_key=lambda label, seed, index=None, notes="": 2,
|
add_ssh_key=lambda label, seed, index=None, notes="": 2,
|
||||||
add_pgp_key=lambda label, seed, index=None, key_type="ed25519", user_id="", notes="": 3,
|
add_pgp_key=lambda label, seed, index=None, key_type="ed25519", user_id="", notes="": 3,
|
||||||
add_nostr_key=lambda label, seed, index=None, notes="": 4,
|
add_nostr_key=lambda label, seed, index=None, notes="": 4,
|
||||||
@@ -42,7 +42,7 @@ class DummyPM:
|
|||||||
)
|
)
|
||||||
self.parent_seed = "seed"
|
self.parent_seed = "seed"
|
||||||
self.handle_display_totp_codes = lambda: None
|
self.handle_display_totp_codes = lambda: None
|
||||||
self.handle_export_database = lambda path: None
|
self.handle_export_database = lambda path, **kwargs: None
|
||||||
self.handle_import_database = lambda path: None
|
self.handle_import_database = lambda path: None
|
||||||
self.change_password = lambda *a, **kw: None
|
self.change_password = lambda *a, **kw: None
|
||||||
self.lock_vault = lambda: None
|
self.lock_vault = lambda: None
|
||||||
@@ -77,7 +77,7 @@ class DummyPM:
|
|||||||
set_offline_mode=lambda v: None,
|
set_offline_mode=lambda v: None,
|
||||||
get_secret_mode_enabled=lambda: True,
|
get_secret_mode_enabled=lambda: True,
|
||||||
get_clipboard_clear_delay=lambda: 30,
|
get_clipboard_clear_delay=lambda: 30,
|
||||||
get_offline_mode=lambda: False,
|
get_offline_mode=lambda: True,
|
||||||
)
|
)
|
||||||
self.secret_mode_enabled = True
|
self.secret_mode_enabled = True
|
||||||
self.clipboard_clear_delay = 30
|
self.clipboard_clear_delay = 30
|
||||||
|
@@ -65,8 +65,14 @@ runner = CliRunner()
|
|||||||
"--digits",
|
"--digits",
|
||||||
"7",
|
"7",
|
||||||
],
|
],
|
||||||
("Label", "seed"),
|
("Label", None),
|
||||||
{"index": 1, "secret": "abc", "period": 45, "digits": 7},
|
{
|
||||||
|
"index": 1,
|
||||||
|
"secret": "abc",
|
||||||
|
"period": 45,
|
||||||
|
"digits": 7,
|
||||||
|
"deterministic": False,
|
||||||
|
},
|
||||||
"otpauth://uri",
|
"otpauth://uri",
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
|
@@ -17,8 +17,8 @@ def _setup_pm(tmp_path: Path):
|
|||||||
cfg = ConfigManager(vault, tmp_path)
|
cfg = ConfigManager(vault, tmp_path)
|
||||||
backup = BackupManager(tmp_path, cfg)
|
backup = BackupManager(tmp_path, cfg)
|
||||||
pm = SimpleNamespace(
|
pm = SimpleNamespace(
|
||||||
handle_export_database=lambda p: export_backup(
|
handle_export_database=lambda p, encrypt=True: export_backup(
|
||||||
vault, backup, p, parent_seed=TEST_SEED
|
vault, backup, p, parent_seed=TEST_SEED, encrypt=encrypt
|
||||||
),
|
),
|
||||||
handle_import_database=lambda p: import_backup(
|
handle_import_database=lambda p: import_backup(
|
||||||
vault, backup, p, parent_seed=TEST_SEED
|
vault, backup, p, parent_seed=TEST_SEED
|
||||||
@@ -91,3 +91,36 @@ def test_cli_import_round_trip(monkeypatch, tmp_path):
|
|||||||
rc = main.main(["import", "--file", str(export_path)])
|
rc = main.main(["import", "--file", str(export_path)])
|
||||||
assert rc == 0
|
assert rc == 0
|
||||||
assert vault.load_index() == original
|
assert vault.load_index() == original
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_export_import_unencrypted(monkeypatch, tmp_path):
|
||||||
|
pm, vault = _setup_pm(tmp_path)
|
||||||
|
data = {
|
||||||
|
"schema_version": 4,
|
||||||
|
"entries": {
|
||||||
|
"0": {
|
||||||
|
"label": "example",
|
||||||
|
"type": "password",
|
||||||
|
"notes": "",
|
||||||
|
"custom_fields": [],
|
||||||
|
"origin": "",
|
||||||
|
"tags": [],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
vault.save_index(data)
|
||||||
|
|
||||||
|
monkeypatch.setattr(main, "PasswordManager", lambda *a, **k: pm)
|
||||||
|
monkeypatch.setattr(main, "configure_logging", lambda: None)
|
||||||
|
monkeypatch.setattr(main, "initialize_app", lambda: None)
|
||||||
|
monkeypatch.setattr(main.signal, "signal", lambda *a, **k: None)
|
||||||
|
|
||||||
|
export_path = tmp_path / "out.json"
|
||||||
|
rc = main.main(["export", "--file", str(export_path), "--unencrypted"])
|
||||||
|
assert rc == 0
|
||||||
|
assert export_path.exists()
|
||||||
|
|
||||||
|
vault.save_index({"schema_version": 4, "entries": {}})
|
||||||
|
rc = main.main(["import", "--file", str(export_path)])
|
||||||
|
assert rc == 0
|
||||||
|
assert vault.load_index() == data
|
||||||
|
@@ -7,7 +7,7 @@ from seedpass.cli import common as cli_common
|
|||||||
runner = CliRunner()
|
runner = CliRunner()
|
||||||
|
|
||||||
|
|
||||||
def _make_pm(called, enabled=False):
|
def _make_pm(called, enabled=True):
|
||||||
cfg = SimpleNamespace(
|
cfg = SimpleNamespace(
|
||||||
get_offline_mode=lambda: enabled,
|
get_offline_mode=lambda: enabled,
|
||||||
set_offline_mode=lambda v: called.setdefault("enabled", v),
|
set_offline_mode=lambda v: called.setdefault("enabled", v),
|
||||||
@@ -24,10 +24,10 @@ def test_toggle_offline_updates(monkeypatch):
|
|||||||
called = {}
|
called = {}
|
||||||
pm = _make_pm(called)
|
pm = _make_pm(called)
|
||||||
monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
|
monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
|
||||||
result = runner.invoke(app, ["config", "toggle-offline"], input="y\n")
|
result = runner.invoke(app, ["config", "toggle-offline"], input="n\n")
|
||||||
assert result.exit_code == 0
|
assert result.exit_code == 0
|
||||||
assert called == {"enabled": True}
|
assert called == {"enabled": False}
|
||||||
assert "Offline mode enabled." in result.stdout
|
assert "Offline mode disabled." in result.stdout
|
||||||
|
|
||||||
|
|
||||||
def test_toggle_offline_keep(monkeypatch):
|
def test_toggle_offline_keep(monkeypatch):
|
||||||
|
@@ -3,11 +3,15 @@ from pathlib import Path
|
|||||||
|
|
||||||
from hypothesis import given, strategies as st, settings, HealthCheck
|
from hypothesis import given, strategies as st, settings, HealthCheck
|
||||||
from mnemonic import Mnemonic
|
from mnemonic import Mnemonic
|
||||||
|
import hashlib
|
||||||
|
import base64
|
||||||
|
import os
|
||||||
|
|
||||||
from utils.key_derivation import (
|
from utils.key_derivation import (
|
||||||
derive_key_from_password,
|
derive_key_from_password,
|
||||||
derive_key_from_password_argon2,
|
derive_key_from_password_argon2,
|
||||||
derive_index_key,
|
derive_index_key,
|
||||||
|
KdfConfig,
|
||||||
)
|
)
|
||||||
from utils.fingerprint import generate_fingerprint
|
from utils.fingerprint import generate_fingerprint
|
||||||
from seedpass.core.encryption import EncryptionManager
|
from seedpass.core.encryption import EncryptionManager
|
||||||
@@ -36,16 +40,27 @@ def test_fuzz_key_round_trip(password, seed_bytes, config, mode, tmp_path: Path)
|
|||||||
seed_phrase = Mnemonic("english").to_mnemonic(seed_bytes)
|
seed_phrase = Mnemonic("english").to_mnemonic(seed_bytes)
|
||||||
fp = generate_fingerprint(seed_phrase)
|
fp = generate_fingerprint(seed_phrase)
|
||||||
if mode == "argon2":
|
if mode == "argon2":
|
||||||
key = derive_key_from_password_argon2(
|
cfg = KdfConfig(
|
||||||
password, fp, time_cost=1, memory_cost=8, parallelism=1
|
params={"time_cost": 1, "memory_cost": 8, "parallelism": 1},
|
||||||
|
salt_b64=base64.b64encode(
|
||||||
|
hashlib.sha256(fp.encode()).digest()[:16]
|
||||||
|
).decode(),
|
||||||
)
|
)
|
||||||
|
key = derive_key_from_password_argon2(password, cfg)
|
||||||
else:
|
else:
|
||||||
key = derive_key_from_password(password, fp, iterations=1)
|
key = derive_key_from_password(password, fp, iterations=1)
|
||||||
|
cfg = KdfConfig(
|
||||||
|
name="pbkdf2",
|
||||||
|
params={"iterations": 1},
|
||||||
|
salt_b64=base64.b64encode(
|
||||||
|
hashlib.sha256(fp.encode()).digest()[:16]
|
||||||
|
).decode(),
|
||||||
|
)
|
||||||
|
|
||||||
enc_mgr = EncryptionManager(key, tmp_path)
|
enc_mgr = EncryptionManager(key, tmp_path)
|
||||||
|
|
||||||
# Parent seed round trip
|
# Parent seed round trip
|
||||||
enc_mgr.encrypt_parent_seed(seed_phrase)
|
enc_mgr.encrypt_parent_seed(seed_phrase, kdf=cfg)
|
||||||
assert enc_mgr.decrypt_parent_seed() == seed_phrase
|
assert enc_mgr.decrypt_parent_seed() == seed_phrase
|
||||||
|
|
||||||
# JSON data round trip
|
# JSON data round trip
|
||||||
|
@@ -30,8 +30,8 @@ class DummyEntries:
|
|||||||
self.data = [(1, "Example", None, None, False)]
|
self.data = [(1, "Example", None, None, False)]
|
||||||
self.code = "111111"
|
self.code = "111111"
|
||||||
|
|
||||||
def list_entries(self, sort_by="index", filter_kind=None, include_archived=False):
|
def list_entries(self, sort_by="index", filter_kinds=None, include_archived=False):
|
||||||
if filter_kind:
|
if filter_kinds:
|
||||||
return [(idx, label, None, None, False) for idx, label, *_ in self.data]
|
return [(idx, label, None, None, False) for idx, label, *_ in self.data]
|
||||||
return self.data
|
return self.data
|
||||||
|
|
||||||
|
@@ -33,7 +33,9 @@ class FakeEntries:
|
|||||||
self.added.append(("password", label, length, username, url))
|
self.added.append(("password", label, length, username, url))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
def add_totp(self, label):
|
def add_totp(
|
||||||
|
self, label, deterministic=False, index=None, secret=None, period=30, digits=6
|
||||||
|
):
|
||||||
self.added.append(("totp", label))
|
self.added.append(("totp", label))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
|
@@ -9,7 +9,7 @@ from seedpass_gui.app import MainWindow
|
|||||||
|
|
||||||
|
|
||||||
class DummyEntries:
|
class DummyEntries:
|
||||||
def list_entries(self, sort_by="index", filter_kind=None, include_archived=False):
|
def list_entries(self, sort_by="index", filter_kinds=None, include_archived=False):
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def search_entries(self, q):
|
def search_entries(self, q):
|
||||||
|
23
src/tests/test_invalid_password_message.py
Normal file
23
src/tests/test_invalid_password_message.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
from tempfile import TemporaryDirectory
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from seedpass.core.manager import PasswordManager
|
||||||
|
from seedpass.core.config_manager import ConfigManager
|
||||||
|
from seedpass.core.errors import SeedPassError
|
||||||
|
from helpers import create_vault, TEST_SEED, TEST_PASSWORD
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_password_shows_friendly_message_once(capsys):
|
||||||
|
with TemporaryDirectory() as tmpdir:
|
||||||
|
tmp_path = Path(tmpdir)
|
||||||
|
vault, _ = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
|
||||||
|
pm = PasswordManager.__new__(PasswordManager)
|
||||||
|
pm.config_manager = ConfigManager(vault, tmp_path)
|
||||||
|
pm.fingerprint_dir = tmp_path
|
||||||
|
pm.parent_seed = ""
|
||||||
|
with pytest.raises(SeedPassError):
|
||||||
|
pm.load_parent_seed(tmp_path, password="wrongpass")
|
||||||
|
captured = capsys.readouterr().out
|
||||||
|
assert captured.count("Incorrect password or corrupt file") == 1
|
@@ -1,4 +1,6 @@
|
|||||||
import bcrypt
|
import bcrypt
|
||||||
|
import hashlib
|
||||||
|
import base64
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from tempfile import TemporaryDirectory
|
from tempfile import TemporaryDirectory
|
||||||
from types import SimpleNamespace
|
from types import SimpleNamespace
|
||||||
@@ -7,6 +9,7 @@ from utils.key_derivation import (
|
|||||||
derive_key_from_password,
|
derive_key_from_password,
|
||||||
derive_key_from_password_argon2,
|
derive_key_from_password_argon2,
|
||||||
derive_index_key,
|
derive_index_key,
|
||||||
|
KdfConfig,
|
||||||
)
|
)
|
||||||
from seedpass.core.encryption import EncryptionManager
|
from seedpass.core.encryption import EncryptionManager
|
||||||
from seedpass.core.vault import Vault
|
from seedpass.core.vault import Vault
|
||||||
@@ -21,10 +24,24 @@ def _setup_profile(tmp: Path, mode: str):
|
|||||||
argon_kwargs = dict(time_cost=1, memory_cost=8, parallelism=1)
|
argon_kwargs = dict(time_cost=1, memory_cost=8, parallelism=1)
|
||||||
fp = tmp.name
|
fp = tmp.name
|
||||||
if mode == "argon2":
|
if mode == "argon2":
|
||||||
seed_key = derive_key_from_password_argon2(TEST_PASSWORD, fp, **argon_kwargs)
|
cfg = KdfConfig(
|
||||||
|
params=argon_kwargs,
|
||||||
|
salt_b64=base64.b64encode(
|
||||||
|
hashlib.sha256(fp.encode()).digest()[:16]
|
||||||
|
).decode(),
|
||||||
|
)
|
||||||
|
seed_key = derive_key_from_password_argon2(TEST_PASSWORD, cfg)
|
||||||
|
EncryptionManager(seed_key, tmp).encrypt_parent_seed(TEST_SEED, kdf=cfg)
|
||||||
else:
|
else:
|
||||||
seed_key = derive_key_from_password(TEST_PASSWORD, fp, iterations=1)
|
seed_key = derive_key_from_password(TEST_PASSWORD, fp, iterations=1)
|
||||||
EncryptionManager(seed_key, tmp).encrypt_parent_seed(TEST_SEED)
|
cfg = KdfConfig(
|
||||||
|
name="pbkdf2",
|
||||||
|
params={"iterations": 1},
|
||||||
|
salt_b64=base64.b64encode(
|
||||||
|
hashlib.sha256(fp.encode()).digest()[:16]
|
||||||
|
).decode(),
|
||||||
|
)
|
||||||
|
EncryptionManager(seed_key, tmp).encrypt_parent_seed(TEST_SEED, kdf=cfg)
|
||||||
|
|
||||||
index_key = derive_index_key(TEST_SEED)
|
index_key = derive_index_key(TEST_SEED)
|
||||||
enc_mgr = EncryptionManager(index_key, tmp)
|
enc_mgr = EncryptionManager(index_key, tmp)
|
||||||
@@ -65,9 +82,9 @@ def test_setup_encryption_manager_kdf_modes(monkeypatch):
|
|||||||
)
|
)
|
||||||
if mode == "argon2":
|
if mode == "argon2":
|
||||||
monkeypatch.setattr(
|
monkeypatch.setattr(
|
||||||
"seedpass.core.manager.derive_key_from_password_argon2",
|
"seedpass.core.manager.KdfConfig",
|
||||||
lambda pw, fp: derive_key_from_password_argon2(
|
lambda salt_b64, **_: KdfConfig(
|
||||||
pw, fp, **argon_kwargs
|
params=argon_kwargs, salt_b64=salt_b64
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
monkeypatch.setattr(PasswordManager, "initialize_bip85", lambda self: None)
|
monkeypatch.setattr(PasswordManager, "initialize_bip85", lambda self: None)
|
||||||
@@ -76,3 +93,26 @@ def test_setup_encryption_manager_kdf_modes(monkeypatch):
|
|||||||
)
|
)
|
||||||
assert pm.setup_encryption_manager(path, exit_on_fail=False)
|
assert pm.setup_encryption_manager(path, exit_on_fail=False)
|
||||||
assert pm.parent_seed == TEST_SEED
|
assert pm.parent_seed == TEST_SEED
|
||||||
|
|
||||||
|
|
||||||
|
def test_kdf_param_round_trip(tmp_path):
|
||||||
|
cfg = KdfConfig(
|
||||||
|
params={"time_cost": 3, "memory_cost": 32, "parallelism": 1},
|
||||||
|
salt_b64=base64.b64encode(b"static-salt-1234").decode(),
|
||||||
|
)
|
||||||
|
key = derive_key_from_password_argon2(TEST_PASSWORD, cfg)
|
||||||
|
mgr = EncryptionManager(key, tmp_path)
|
||||||
|
mgr.encrypt_parent_seed(TEST_SEED, kdf=cfg)
|
||||||
|
stored = mgr.get_file_kdf(Path("parent_seed.enc"))
|
||||||
|
assert stored.params == cfg.params
|
||||||
|
|
||||||
|
|
||||||
|
def test_vault_kdf_migration(tmp_path):
|
||||||
|
index_key = derive_index_key(TEST_SEED)
|
||||||
|
mgr = EncryptionManager(index_key, tmp_path)
|
||||||
|
vault = Vault(mgr, tmp_path)
|
||||||
|
old_kdf = KdfConfig(name="hkdf", version=0, params={}, salt_b64="")
|
||||||
|
mgr.save_json_data({"entries": {}}, vault.index_file, kdf=old_kdf)
|
||||||
|
vault.load_index()
|
||||||
|
new_kdf = mgr.get_file_kdf(vault.index_file)
|
||||||
|
assert new_kdf.version == KdfConfig().version
|
||||||
|
19
src/tests/test_kdf_strength_slider.py
Normal file
19
src/tests/test_kdf_strength_slider.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
from tempfile import TemporaryDirectory
|
||||||
|
from types import SimpleNamespace
|
||||||
|
|
||||||
|
from helpers import create_vault, TEST_SEED, TEST_PASSWORD
|
||||||
|
from seedpass.core.config_manager import ConfigManager
|
||||||
|
from main import handle_set_kdf_iterations
|
||||||
|
|
||||||
|
|
||||||
|
def test_kdf_strength_slider_persists(monkeypatch):
|
||||||
|
with TemporaryDirectory() as tmpdir:
|
||||||
|
tmp_path = Path(tmpdir)
|
||||||
|
vault, enc_mgr = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
|
||||||
|
cfg_mgr = ConfigManager(vault, tmp_path)
|
||||||
|
pm = SimpleNamespace(config_manager=cfg_mgr)
|
||||||
|
inputs = iter(["3"])
|
||||||
|
monkeypatch.setattr("builtins.input", lambda *_: next(inputs))
|
||||||
|
handle_set_kdf_iterations(pm)
|
||||||
|
assert cfg_mgr.get_kdf_iterations() == 100_000
|
@@ -1,11 +1,15 @@
|
|||||||
import logging
|
import logging
|
||||||
import pytest
|
import pytest
|
||||||
|
import logging
|
||||||
|
import hashlib
|
||||||
|
import base64
|
||||||
from utils.fingerprint import generate_fingerprint
|
from utils.fingerprint import generate_fingerprint
|
||||||
from utils.key_derivation import (
|
from utils.key_derivation import (
|
||||||
derive_key_from_password,
|
derive_key_from_password,
|
||||||
derive_key_from_password_argon2,
|
derive_key_from_password_argon2,
|
||||||
derive_index_key_seed_only,
|
derive_index_key_seed_only,
|
||||||
derive_index_key,
|
derive_index_key,
|
||||||
|
KdfConfig,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -48,15 +52,17 @@ def test_argon2_fingerprint_affects_key():
|
|||||||
fp1 = generate_fingerprint("seed one")
|
fp1 = generate_fingerprint("seed one")
|
||||||
fp2 = generate_fingerprint("seed two")
|
fp2 = generate_fingerprint("seed two")
|
||||||
|
|
||||||
k1 = derive_key_from_password_argon2(
|
cfg1 = KdfConfig(
|
||||||
password, fp1, time_cost=1, memory_cost=8, parallelism=1
|
params={"time_cost": 1, "memory_cost": 8, "parallelism": 1},
|
||||||
|
salt_b64=base64.b64encode(hashlib.sha256(fp1.encode()).digest()[:16]).decode(),
|
||||||
)
|
)
|
||||||
k2 = derive_key_from_password_argon2(
|
cfg2 = KdfConfig(
|
||||||
password, fp1, time_cost=1, memory_cost=8, parallelism=1
|
params={"time_cost": 1, "memory_cost": 8, "parallelism": 1},
|
||||||
)
|
salt_b64=base64.b64encode(hashlib.sha256(fp2.encode()).digest()[:16]).decode(),
|
||||||
k3 = derive_key_from_password_argon2(
|
|
||||||
password, fp2, time_cost=1, memory_cost=8, parallelism=1
|
|
||||||
)
|
)
|
||||||
|
k1 = derive_key_from_password_argon2(password, cfg1)
|
||||||
|
k2 = derive_key_from_password_argon2(password, cfg1)
|
||||||
|
k3 = derive_key_from_password_argon2(password, cfg2)
|
||||||
|
|
||||||
assert k1 == k2
|
assert k1 == k2
|
||||||
assert k1 != k3
|
assert k1 != k3
|
||||||
|
19
src/tests/test_key_hierarchy.py
Normal file
19
src/tests/test_key_hierarchy.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import base64
|
||||||
|
from bip_utils import Bip39SeedGenerator
|
||||||
|
from utils.key_hierarchy import kd
|
||||||
|
from utils.key_derivation import derive_index_key
|
||||||
|
|
||||||
|
|
||||||
|
def test_kd_distinct_infos():
|
||||||
|
root = b"root" * 8
|
||||||
|
k1 = kd(root, b"info1")
|
||||||
|
k2 = kd(root, b"info2")
|
||||||
|
assert k1 != k2
|
||||||
|
|
||||||
|
|
||||||
|
def test_derive_index_key_matches_hierarchy():
|
||||||
|
seed = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
|
||||||
|
seed_bytes = Bip39SeedGenerator(seed).Generate()
|
||||||
|
master = kd(seed_bytes, b"seedpass:v1:master")
|
||||||
|
expected = base64.urlsafe_b64encode(kd(master, b"seedpass:v1:storage"))
|
||||||
|
assert derive_index_key(seed) == expected
|
@@ -37,10 +37,30 @@ def test_add_and_modify_key_value():
|
|||||||
"tags": [],
|
"tags": [],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Appears in listing
|
||||||
|
assert em.list_entries() == [(idx, "API entry", None, None, False)]
|
||||||
|
|
||||||
|
# Modify key and value
|
||||||
em.modify_entry(idx, key="api_key2", value="def456")
|
em.modify_entry(idx, key="api_key2", value="def456")
|
||||||
updated = em.retrieve_entry(idx)
|
updated = em.retrieve_entry(idx)
|
||||||
assert updated["key"] == "api_key2"
|
assert updated["key"] == "api_key2"
|
||||||
assert updated["value"] == "def456"
|
assert updated["value"] == "def456"
|
||||||
|
|
||||||
|
# Archive and ensure it disappears from the default listing
|
||||||
|
em.archive_entry(idx)
|
||||||
|
archived = em.retrieve_entry(idx)
|
||||||
|
assert archived["archived"] is True
|
||||||
|
assert em.list_entries() == []
|
||||||
|
assert em.list_entries(include_archived=True) == [
|
||||||
|
(idx, "API entry", None, None, True)
|
||||||
|
]
|
||||||
|
|
||||||
|
# Restore and ensure it reappears
|
||||||
|
em.restore_entry(idx)
|
||||||
|
restored = em.retrieve_entry(idx)
|
||||||
|
assert restored["archived"] is False
|
||||||
|
assert em.list_entries() == [(idx, "API entry", None, None, False)]
|
||||||
|
|
||||||
|
# Values are not searchable
|
||||||
results = em.search_entries("def456")
|
results = em.search_entries("def456")
|
||||||
assert results == []
|
assert results == []
|
||||||
|
@@ -1,4 +1,5 @@
|
|||||||
import json
|
import json
|
||||||
|
import base64
|
||||||
import hashlib
|
import hashlib
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
@@ -13,6 +14,7 @@ import gzip
|
|||||||
|
|
||||||
from seedpass.core.manager import PasswordManager, EncryptionMode
|
from seedpass.core.manager import PasswordManager, EncryptionMode
|
||||||
from seedpass.core.vault import Vault
|
from seedpass.core.vault import Vault
|
||||||
|
from seedpass.core.errors import SeedPassError
|
||||||
|
|
||||||
|
|
||||||
def test_legacy_index_migrates(monkeypatch, tmp_path: Path):
|
def test_legacy_index_migrates(monkeypatch, tmp_path: Path):
|
||||||
@@ -82,7 +84,7 @@ def test_failed_migration_restores_legacy(monkeypatch, tmp_path: Path):
|
|||||||
assert not vault.migrated_from_legacy
|
assert not vault.migrated_from_legacy
|
||||||
|
|
||||||
|
|
||||||
def test_migrated_index_has_v2_prefix(monkeypatch, tmp_path: Path):
|
def test_migrated_index_has_v3_prefix(monkeypatch, tmp_path: Path):
|
||||||
vault, _ = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
|
vault, _ = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
|
||||||
|
|
||||||
key = derive_index_key(TEST_SEED)
|
key = derive_index_key(TEST_SEED)
|
||||||
@@ -99,7 +101,8 @@ def test_migrated_index_has_v2_prefix(monkeypatch, tmp_path: Path):
|
|||||||
vault.load_index()
|
vault.load_index()
|
||||||
|
|
||||||
new_file = tmp_path / "seedpass_entries_db.json.enc"
|
new_file = tmp_path / "seedpass_entries_db.json.enc"
|
||||||
assert new_file.read_bytes().startswith(b"V2:")
|
payload = json.loads(new_file.read_text())
|
||||||
|
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||||
assert vault.migrated_from_legacy
|
assert vault.migrated_from_legacy
|
||||||
|
|
||||||
|
|
||||||
@@ -154,6 +157,14 @@ def test_migration_syncs_when_confirmed(monkeypatch, tmp_path: Path):
|
|||||||
pm.fingerprint_dir = tmp_path
|
pm.fingerprint_dir = tmp_path
|
||||||
pm.current_fingerprint = tmp_path.name
|
pm.current_fingerprint = tmp_path.name
|
||||||
pm.bip85 = SimpleNamespace()
|
pm.bip85 = SimpleNamespace()
|
||||||
|
from seedpass.core.config_manager import ConfigManager
|
||||||
|
|
||||||
|
cfg_mgr = ConfigManager(pm.vault, tmp_path)
|
||||||
|
cfg = cfg_mgr.load_config(require_pin=False)
|
||||||
|
cfg["offline_mode"] = False
|
||||||
|
cfg_mgr.save_config(cfg)
|
||||||
|
pm.config_manager = cfg_mgr
|
||||||
|
pm.offline_mode = False
|
||||||
|
|
||||||
calls = {"sync": 0}
|
calls = {"sync": 0}
|
||||||
pm.sync_vault = lambda *a, **k: calls.__setitem__("sync", calls["sync"] + 1) or {
|
pm.sync_vault = lambda *a, **k: calls.__setitem__("sync", calls["sync"] + 1) or {
|
||||||
@@ -277,6 +288,7 @@ def test_legacy_index_reinit_syncs_once_when_confirmed(monkeypatch, tmp_path: Pa
|
|||||||
pm.fingerprint_dir = tmp_path
|
pm.fingerprint_dir = tmp_path
|
||||||
pm.current_fingerprint = tmp_path.name
|
pm.current_fingerprint = tmp_path.name
|
||||||
pm.bip85 = SimpleNamespace()
|
pm.bip85 = SimpleNamespace()
|
||||||
|
pm.offline_mode = True
|
||||||
|
|
||||||
monkeypatch.setattr(
|
monkeypatch.setattr(
|
||||||
"seedpass.core.manager.NostrClient", lambda *a, **k: SimpleNamespace()
|
"seedpass.core.manager.NostrClient", lambda *a, **k: SimpleNamespace()
|
||||||
@@ -294,7 +306,7 @@ def test_legacy_index_reinit_syncs_once_when_confirmed(monkeypatch, tmp_path: Pa
|
|||||||
pm.initialize_managers()
|
pm.initialize_managers()
|
||||||
pm.initialize_managers()
|
pm.initialize_managers()
|
||||||
|
|
||||||
assert calls["sync"] == 1
|
assert calls["sync"] == 0
|
||||||
assert enc_mgr.last_migration_performed is False
|
assert enc_mgr.last_migration_performed is False
|
||||||
|
|
||||||
|
|
||||||
@@ -314,6 +326,13 @@ def test_schema_migration_no_sync_prompt(monkeypatch, tmp_path: Path):
|
|||||||
pm.fingerprint_dir = tmp_path
|
pm.fingerprint_dir = tmp_path
|
||||||
pm.current_fingerprint = tmp_path.name
|
pm.current_fingerprint = tmp_path.name
|
||||||
pm.bip85 = SimpleNamespace()
|
pm.bip85 = SimpleNamespace()
|
||||||
|
from seedpass.core.config_manager import ConfigManager
|
||||||
|
|
||||||
|
cfg_mgr = ConfigManager(pm.vault, tmp_path)
|
||||||
|
cfg = cfg_mgr.load_config(require_pin=False)
|
||||||
|
cfg["offline_mode"] = False
|
||||||
|
cfg_mgr.save_config(cfg)
|
||||||
|
pm.config_manager = cfg_mgr
|
||||||
pm.offline_mode = False
|
pm.offline_mode = False
|
||||||
|
|
||||||
calls = {"sync": 0, "confirm": 0}
|
calls = {"sync": 0, "confirm": 0}
|
||||||
@@ -368,7 +387,7 @@ def test_declined_migration_no_sync_prompt(monkeypatch, tmp_path: Path):
|
|||||||
|
|
||||||
monkeypatch.setattr("seedpass.core.manager.confirm_action", fake_confirm)
|
monkeypatch.setattr("seedpass.core.manager.confirm_action", fake_confirm)
|
||||||
|
|
||||||
with pytest.raises(SystemExit):
|
with pytest.raises(SeedPassError):
|
||||||
pm.initialize_managers()
|
pm.initialize_managers()
|
||||||
|
|
||||||
assert calls["confirm"] == 0
|
assert calls["confirm"] == 0
|
||||||
@@ -407,7 +426,7 @@ def test_failed_migration_no_sync_prompt(monkeypatch, tmp_path: Path):
|
|||||||
|
|
||||||
monkeypatch.setattr("seedpass.core.manager.confirm_action", fake_confirm)
|
monkeypatch.setattr("seedpass.core.manager.confirm_action", fake_confirm)
|
||||||
|
|
||||||
with pytest.raises(SystemExit):
|
with pytest.raises(SeedPassError):
|
||||||
pm.initialize_managers()
|
pm.initialize_managers()
|
||||||
|
|
||||||
assert calls["confirm"] == 0
|
assert calls["confirm"] == 0
|
||||||
|
@@ -66,5 +66,5 @@ def test_migrate_iterations(tmp_path, monkeypatch, iterations):
|
|||||||
cfg = ConfigManager(vault, tmp_path)
|
cfg = ConfigManager(vault, tmp_path)
|
||||||
assert cfg.get_kdf_iterations() == iterations
|
assert cfg.get_kdf_iterations() == iterations
|
||||||
|
|
||||||
content = (tmp_path / "seedpass_entries_db.json.enc").read_bytes()
|
payload = json.loads((tmp_path / "seedpass_entries_db.json.enc").read_text())
|
||||||
assert content.startswith(b"V2:")
|
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||||
|
@@ -50,6 +50,6 @@ def test_migrate_legacy_sets_flag(tmp_path, monkeypatch):
|
|||||||
monkeypatch.setattr(vault_module, "prompt_existing_password", lambda _: password)
|
monkeypatch.setattr(vault_module, "prompt_existing_password", lambda _: password)
|
||||||
monkeypatch.setattr("builtins.input", lambda _: "2")
|
monkeypatch.setattr("builtins.input", lambda _: "2")
|
||||||
vault.load_index()
|
vault.load_index()
|
||||||
content = (tmp_path / "seedpass_entries_db.json.enc").read_bytes()
|
payload = json.loads((tmp_path / "seedpass_entries_db.json.enc").read_text())
|
||||||
assert content.startswith(b"V2:")
|
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||||
assert vault.encryption_manager.last_migration_performed is True
|
assert vault.encryption_manager.last_migration_performed is True
|
||||||
|
@@ -1,4 +1,5 @@
|
|||||||
import json
|
import json
|
||||||
|
import base64
|
||||||
import hashlib
|
import hashlib
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from types import SimpleNamespace
|
from types import SimpleNamespace
|
||||||
@@ -34,7 +35,8 @@ def test_legacy_migration_second_session(monkeypatch, tmp_path: Path) -> None:
|
|||||||
monkeypatch.setattr("builtins.input", lambda *_a, **_k: "y")
|
monkeypatch.setattr("builtins.input", lambda *_a, **_k: "y")
|
||||||
vault.load_index()
|
vault.load_index()
|
||||||
new_file = fp_dir / "seedpass_entries_db.json.enc"
|
new_file = fp_dir / "seedpass_entries_db.json.enc"
|
||||||
assert new_file.read_bytes().startswith(b"V2:")
|
payload = json.loads(new_file.read_text())
|
||||||
|
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||||
|
|
||||||
new_enc_mgr = EncryptionManager(key, fp_dir)
|
new_enc_mgr = EncryptionManager(key, fp_dir)
|
||||||
new_vault = Vault(new_enc_mgr, fp_dir)
|
new_vault = Vault(new_enc_mgr, fp_dir)
|
||||||
@@ -59,4 +61,5 @@ def test_legacy_migration_second_session(monkeypatch, tmp_path: Path) -> None:
|
|||||||
)
|
)
|
||||||
|
|
||||||
pm.initialize_managers()
|
pm.initialize_managers()
|
||||||
assert new_file.read_bytes().startswith(b"V2:")
|
payload = json.loads(new_file.read_text())
|
||||||
|
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||||
|
85
src/tests/test_list_entries_all_types.py
Normal file
85
src/tests/test_list_entries_all_types.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from tempfile import TemporaryDirectory
|
||||||
|
from types import SimpleNamespace
|
||||||
|
|
||||||
|
from typer.testing import CliRunner
|
||||||
|
|
||||||
|
from seedpass.cli import app as cli_app
|
||||||
|
from seedpass.cli import entry as entry_cli
|
||||||
|
from helpers import create_vault, TEST_SEED, TEST_PASSWORD
|
||||||
|
from seedpass.core.backup import BackupManager
|
||||||
|
from seedpass.core.config_manager import ConfigManager
|
||||||
|
from seedpass.core.entry_management import EntryManager
|
||||||
|
from seedpass.core.manager import PasswordManager, EncryptionMode
|
||||||
|
|
||||||
|
|
||||||
|
def _setup_manager(tmp_path: Path) -> tuple[PasswordManager, EntryManager]:
|
||||||
|
vault, enc_mgr = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
|
||||||
|
cfg_mgr = ConfigManager(vault, tmp_path)
|
||||||
|
backup_mgr = BackupManager(tmp_path, cfg_mgr)
|
||||||
|
entry_mgr = EntryManager(vault, backup_mgr)
|
||||||
|
|
||||||
|
pm = PasswordManager.__new__(PasswordManager)
|
||||||
|
pm.encryption_mode = EncryptionMode.SEED_ONLY
|
||||||
|
pm.encryption_manager = enc_mgr
|
||||||
|
pm.vault = vault
|
||||||
|
pm.entry_manager = entry_mgr
|
||||||
|
pm.backup_manager = backup_mgr
|
||||||
|
pm.parent_seed = TEST_SEED
|
||||||
|
pm.nostr_client = SimpleNamespace()
|
||||||
|
pm.fingerprint_dir = tmp_path
|
||||||
|
pm.secret_mode_enabled = False
|
||||||
|
return pm, entry_mgr
|
||||||
|
|
||||||
|
|
||||||
|
def _create_all_entries(em: EntryManager) -> None:
|
||||||
|
em.add_entry("pw", 8)
|
||||||
|
em.add_totp("totp", TEST_SEED)
|
||||||
|
em.add_ssh_key("ssh", TEST_SEED)
|
||||||
|
em.add_seed("seed", TEST_SEED, words_num=12)
|
||||||
|
em.add_nostr_key("nostr", TEST_SEED)
|
||||||
|
em.add_pgp_key("pgp", TEST_SEED)
|
||||||
|
em.add_key_value("kv", "k", "v")
|
||||||
|
em.add_managed_account("acct", TEST_SEED)
|
||||||
|
|
||||||
|
|
||||||
|
def test_cli_list_all_types(monkeypatch):
|
||||||
|
with TemporaryDirectory() as tmpdir:
|
||||||
|
tmp_path = Path(tmpdir)
|
||||||
|
pm, em = _setup_manager(tmp_path)
|
||||||
|
_create_all_entries(em)
|
||||||
|
|
||||||
|
def fake_get_entry_service(_ctx):
|
||||||
|
return SimpleNamespace(
|
||||||
|
list_entries=lambda sort_by, filter_kinds, include_archived: pm.entry_manager.list_entries(
|
||||||
|
sort_by=sort_by,
|
||||||
|
filter_kinds=filter_kinds,
|
||||||
|
include_archived=include_archived,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
monkeypatch.setattr(entry_cli, "_get_entry_service", fake_get_entry_service)
|
||||||
|
|
||||||
|
runner = CliRunner()
|
||||||
|
result = runner.invoke(cli_app, ["entry", "list"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
out = result.stdout
|
||||||
|
for label in ["pw", "totp", "ssh", "seed", "nostr", "pgp", "kv", "acct"]:
|
||||||
|
assert label in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_menu_list_all_types(monkeypatch, capsys):
|
||||||
|
with TemporaryDirectory() as tmpdir:
|
||||||
|
tmp_path = Path(tmpdir)
|
||||||
|
pm, em = _setup_manager(tmp_path)
|
||||||
|
_create_all_entries(em)
|
||||||
|
|
||||||
|
inputs = iter(["1", "", ""]) # choose All then exit
|
||||||
|
monkeypatch.setattr("builtins.input", lambda *_: next(inputs))
|
||||||
|
|
||||||
|
pm.handle_list_entries()
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
for label in ["pw", "totp", "ssh", "seed", "nostr", "pgp", "kv", "acct"]:
|
||||||
|
assert label in out
|
@@ -57,5 +57,5 @@ def test_filter_by_type():
|
|||||||
em = setup_entry_manager(tmp_path)
|
em = setup_entry_manager(tmp_path)
|
||||||
em.add_entry("site", 8, "user")
|
em.add_entry("site", 8, "user")
|
||||||
em.add_totp("Example", TEST_SEED)
|
em.add_totp("Example", TEST_SEED)
|
||||||
result = em.list_entries(filter_kind=EntryType.TOTP.value)
|
result = em.list_entries(filter_kinds=[EntryType.TOTP.value])
|
||||||
assert result == [(1, "Example", None, None, False)]
|
assert result == [(1, "Example", None, None, False)]
|
||||||
|
@@ -41,6 +41,9 @@ def test_add_and_get_managed_account_seed():
|
|||||||
assert fp
|
assert fp
|
||||||
assert (tmp_path / "accounts" / fp).exists()
|
assert (tmp_path / "accounts" / fp).exists()
|
||||||
|
|
||||||
|
# Appears in listing
|
||||||
|
assert mgr.list_entries() == [(idx, "acct", None, None, False)]
|
||||||
|
|
||||||
phrase_a = mgr.get_managed_account_seed(idx, TEST_SEED)
|
phrase_a = mgr.get_managed_account_seed(idx, TEST_SEED)
|
||||||
phrase_b = mgr.get_managed_account_seed(idx, TEST_SEED)
|
phrase_b = mgr.get_managed_account_seed(idx, TEST_SEED)
|
||||||
assert phrase_a == phrase_b
|
assert phrase_a == phrase_b
|
||||||
@@ -51,6 +54,23 @@ def test_add_and_get_managed_account_seed():
|
|||||||
assert phrase_a == expected
|
assert phrase_a == expected
|
||||||
assert generate_fingerprint(phrase_a) == fp
|
assert generate_fingerprint(phrase_a) == fp
|
||||||
|
|
||||||
|
# Archive and ensure it disappears from default listing
|
||||||
|
mgr.archive_entry(idx)
|
||||||
|
archived = mgr.retrieve_entry(idx)
|
||||||
|
assert archived["archived"] is True
|
||||||
|
assert mgr.list_entries() == []
|
||||||
|
assert mgr.list_entries(include_archived=True) == [
|
||||||
|
(idx, "acct", None, None, True)
|
||||||
|
]
|
||||||
|
|
||||||
|
# Restore and ensure deterministic derivation is unchanged
|
||||||
|
mgr.restore_entry(idx)
|
||||||
|
restored = mgr.retrieve_entry(idx)
|
||||||
|
assert restored["archived"] is False
|
||||||
|
assert mgr.list_entries() == [(idx, "acct", None, None, False)]
|
||||||
|
phrase_c = mgr.get_managed_account_seed(idx, TEST_SEED)
|
||||||
|
assert phrase_c == expected
|
||||||
|
|
||||||
|
|
||||||
def test_load_and_exit_managed_account(monkeypatch):
|
def test_load_and_exit_managed_account(monkeypatch):
|
||||||
with TemporaryDirectory() as tmpdir:
|
with TemporaryDirectory() as tmpdir:
|
||||||
|
@@ -60,15 +60,11 @@ def test_handle_add_totp(monkeypatch, capsys):
|
|||||||
out = capsys.readouterr().out
|
out = capsys.readouterr().out
|
||||||
|
|
||||||
entry = entry_mgr.retrieve_entry(0)
|
entry = entry_mgr.retrieve_entry(0)
|
||||||
assert entry == {
|
assert entry["type"] == "totp"
|
||||||
"type": "totp",
|
assert entry["kind"] == "totp"
|
||||||
"kind": "totp",
|
assert entry["label"] == "Example"
|
||||||
"label": "Example",
|
assert entry["deterministic"] is False
|
||||||
"index": 0,
|
assert "index" not in entry
|
||||||
"period": 30,
|
assert "secret" in entry
|
||||||
"digits": 6,
|
assert len(entry["secret"]) >= 16
|
||||||
"archived": False,
|
|
||||||
"notes": "",
|
|
||||||
"tags": [],
|
|
||||||
}
|
|
||||||
assert "ID 0" in out
|
assert "ID 0" in out
|
||||||
|
@@ -32,7 +32,7 @@ def test_handle_display_totp_codes(monkeypatch, capsys, password_manager):
|
|||||||
|
|
||||||
pm.handle_display_totp_codes()
|
pm.handle_display_totp_codes()
|
||||||
out = capsys.readouterr().out
|
out = capsys.readouterr().out
|
||||||
assert "Generated 2FA Codes" in out
|
assert "Imported 2FA Codes" in out
|
||||||
assert "[0] Example" in out
|
assert "[0] Example" in out
|
||||||
assert "123456" in out
|
assert "123456" in out
|
||||||
|
|
||||||
|
18
src/tests/test_manifest_id_privacy.py
Normal file
18
src/tests/test_manifest_id_privacy.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import asyncio
|
||||||
|
|
||||||
|
from helpers import dummy_nostr_client
|
||||||
|
|
||||||
|
|
||||||
|
def test_published_events_no_fingerprint(dummy_nostr_client):
|
||||||
|
client, relay = dummy_nostr_client
|
||||||
|
asyncio.run(client.publish_snapshot(b"secret"))
|
||||||
|
fingerprint = "fp"
|
||||||
|
events = list(relay.manifests) + list(relay.chunks.values())
|
||||||
|
seen = set()
|
||||||
|
for ev in events:
|
||||||
|
if id(ev) in seen:
|
||||||
|
continue
|
||||||
|
seen.add(id(ev))
|
||||||
|
assert fingerprint not in ev.id
|
||||||
|
for tag in getattr(ev, "tags", []):
|
||||||
|
assert fingerprint not in tag
|
@@ -5,6 +5,7 @@ from tempfile import TemporaryDirectory
|
|||||||
from seedpass.core.manager import PasswordManager
|
from seedpass.core.manager import PasswordManager
|
||||||
from utils.fingerprint_manager import FingerprintManager
|
from utils.fingerprint_manager import FingerprintManager
|
||||||
from utils.fingerprint import generate_fingerprint
|
from utils.fingerprint import generate_fingerprint
|
||||||
|
from seedpass.core.state_manager import StateManager
|
||||||
|
|
||||||
VALID_SEED = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
|
VALID_SEED = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
|
||||||
|
|
||||||
@@ -13,6 +14,7 @@ def setup_pm(tmp_path, monkeypatch):
|
|||||||
pm = PasswordManager.__new__(PasswordManager)
|
pm = PasswordManager.__new__(PasswordManager)
|
||||||
pm.fingerprint_manager = FingerprintManager(tmp_path)
|
pm.fingerprint_manager = FingerprintManager(tmp_path)
|
||||||
pm.config_manager = type("Cfg", (), {"get_kdf_iterations": lambda self: 1})()
|
pm.config_manager = type("Cfg", (), {"get_kdf_iterations": lambda self: 1})()
|
||||||
|
pm.state_manager = StateManager(tmp_path)
|
||||||
monkeypatch.setattr("seedpass.core.manager.prompt_for_password", lambda: "pw")
|
monkeypatch.setattr("seedpass.core.manager.prompt_for_password", lambda: "pw")
|
||||||
monkeypatch.setattr("seedpass.core.manager.derive_index_key", lambda seed: b"idx")
|
monkeypatch.setattr("seedpass.core.manager.derive_index_key", lambda seed: b"idx")
|
||||||
monkeypatch.setattr(
|
monkeypatch.setattr(
|
||||||
@@ -49,3 +51,5 @@ def test_generate_new_seed_creates_profile(monkeypatch):
|
|||||||
|
|
||||||
assert fingerprint == generate_fingerprint(VALID_SEED)
|
assert fingerprint == generate_fingerprint(VALID_SEED)
|
||||||
assert pm.fingerprint_manager.list_fingerprints() == [fingerprint]
|
assert pm.fingerprint_manager.list_fingerprints() == [fingerprint]
|
||||||
|
sm = StateManager(tmp_path / fingerprint)
|
||||||
|
assert sm.state["nostr_account_idx"] == 1
|
||||||
|
19
src/tests/test_nonce_uniqueness.py
Normal file
19
src/tests/test_nonce_uniqueness.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from helpers import TEST_SEED
|
||||||
|
from utils.key_derivation import derive_index_key
|
||||||
|
from seedpass.core.encryption import EncryptionManager
|
||||||
|
|
||||||
|
|
||||||
|
def test_nonce_uniqueness(tmp_path: Path) -> None:
|
||||||
|
key = derive_index_key(TEST_SEED)
|
||||||
|
manager = EncryptionManager(key, tmp_path)
|
||||||
|
plaintext = b"repeat"
|
||||||
|
nonces = set()
|
||||||
|
for _ in range(10):
|
||||||
|
payload = manager.encrypt_data(plaintext)
|
||||||
|
assert payload.startswith(b"V3|")
|
||||||
|
nonce = payload[3:15]
|
||||||
|
assert nonce not in nonces
|
||||||
|
nonces.add(nonce)
|
||||||
|
assert len(nonces) == 10
|
@@ -5,7 +5,6 @@ import json
|
|||||||
|
|
||||||
from helpers import DummyEvent, DummyFilter, dummy_nostr_client
|
from helpers import DummyEvent, DummyFilter, dummy_nostr_client
|
||||||
from nostr.backup_models import KIND_MANIFEST, KIND_SNAPSHOT_CHUNK
|
from nostr.backup_models import KIND_MANIFEST, KIND_SNAPSHOT_CHUNK
|
||||||
from nostr.client import MANIFEST_ID_PREFIX
|
|
||||||
from nostr_sdk import Keys
|
from nostr_sdk import Keys
|
||||||
|
|
||||||
|
|
||||||
@@ -55,9 +54,7 @@ def test_fetch_snapshot_legacy_key_fallback(dummy_nostr_client, monkeypatch):
|
|||||||
],
|
],
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
manifest_event = DummyEvent(
|
manifest_event = DummyEvent(KIND_MANIFEST, manifest_json, tags=["legacy"])
|
||||||
KIND_MANIFEST, manifest_json, tags=[f"{MANIFEST_ID_PREFIX}fp"]
|
|
||||||
)
|
|
||||||
chunk_event = DummyEvent(
|
chunk_event = DummyEvent(
|
||||||
KIND_SNAPSHOT_CHUNK,
|
KIND_SNAPSHOT_CHUNK,
|
||||||
base64.b64encode(chunk_bytes).decode("utf-8"),
|
base64.b64encode(chunk_bytes).decode("utf-8"),
|
||||||
@@ -69,9 +66,9 @@ def test_fetch_snapshot_legacy_key_fallback(dummy_nostr_client, monkeypatch):
|
|||||||
async def fake_fetch_events(f, _timeout):
|
async def fake_fetch_events(f, _timeout):
|
||||||
call["count"] += 1
|
call["count"] += 1
|
||||||
call["authors"].append(getattr(f, "author_pk", None))
|
call["authors"].append(getattr(f, "author_pk", None))
|
||||||
if call["count"] <= 2:
|
if call["count"] == 1:
|
||||||
return type("R", (), {"to_vec": lambda self: []})()
|
return type("R", (), {"to_vec": lambda self: []})()
|
||||||
elif call["count"] == 3:
|
elif call["count"] == 2:
|
||||||
return type("R", (), {"to_vec": lambda self: [manifest_event]})()
|
return type("R", (), {"to_vec": lambda self: [manifest_event]})()
|
||||||
else:
|
else:
|
||||||
return type("R", (), {"to_vec": lambda self: [chunk_event]})()
|
return type("R", (), {"to_vec": lambda self: [chunk_event]})()
|
||||||
|
@@ -1,49 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
|
|
||||||
from helpers import TEST_SEED, dummy_nostr_client
|
|
||||||
from nostr.backup_models import KIND_MANIFEST
|
|
||||||
from nostr.client import MANIFEST_ID_PREFIX, NostrClient
|
|
||||||
|
|
||||||
|
|
||||||
def test_fetch_latest_snapshot_legacy_identifier(dummy_nostr_client, monkeypatch):
|
|
||||||
client, relay = dummy_nostr_client
|
|
||||||
data = b"legacy"
|
|
||||||
asyncio.run(client.publish_snapshot(data))
|
|
||||||
relay.manifests[-1].tags = [MANIFEST_ID_PREFIX.rstrip("-")]
|
|
||||||
relay.filters.clear()
|
|
||||||
|
|
||||||
orig_fetch = relay.fetch_events
|
|
||||||
|
|
||||||
async def fetch_events(self, f, timeout):
|
|
||||||
identifier = f.ids[0] if getattr(f, "ids", None) else None
|
|
||||||
kind = getattr(f, "kind_val", None)
|
|
||||||
if kind == KIND_MANIFEST:
|
|
||||||
events = [m for m in self.manifests if identifier in m.tags]
|
|
||||||
self.filters.append(f)
|
|
||||||
|
|
||||||
class Res:
|
|
||||||
def __init__(self, evs):
|
|
||||||
self._evs = evs
|
|
||||||
|
|
||||||
def to_vec(self):
|
|
||||||
return self._evs
|
|
||||||
|
|
||||||
return Res(events)
|
|
||||||
return await orig_fetch(f, timeout)
|
|
||||||
|
|
||||||
monkeypatch.setattr(
|
|
||||||
relay, "fetch_events", fetch_events.__get__(relay, relay.__class__)
|
|
||||||
)
|
|
||||||
|
|
||||||
enc_mgr = client.encryption_manager
|
|
||||||
monkeypatch.setattr(
|
|
||||||
enc_mgr, "decrypt_parent_seed", lambda: TEST_SEED, raising=False
|
|
||||||
)
|
|
||||||
monkeypatch.setattr("nostr.client.KeyManager", type(client.key_manager))
|
|
||||||
client2 = NostrClient(enc_mgr, "fp")
|
|
||||||
relay.filters.clear()
|
|
||||||
result = asyncio.run(client2.fetch_latest_snapshot())
|
|
||||||
assert result is not None
|
|
||||||
ids = [f.ids[0] for f in relay.filters]
|
|
||||||
assert ids[0] == f"{MANIFEST_ID_PREFIX}fp"
|
|
||||||
assert MANIFEST_ID_PREFIX.rstrip("-") in ids
|
|
14
src/tests/test_offline_mode_default_enabled.py
Normal file
14
src/tests/test_offline_mode_default_enabled.py
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
from tempfile import TemporaryDirectory
|
||||||
|
|
||||||
|
from seedpass.core.config_manager import ConfigManager
|
||||||
|
from helpers import create_vault, TEST_SEED, TEST_PASSWORD
|
||||||
|
|
||||||
|
|
||||||
|
def test_offline_mode_default_enabled():
|
||||||
|
with TemporaryDirectory() as tmpdir:
|
||||||
|
tmp_path = Path(tmpdir)
|
||||||
|
vault, enc_mgr = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
|
||||||
|
cfg_mgr = ConfigManager(vault, tmp_path)
|
||||||
|
config = cfg_mgr.load_config(require_pin=False)
|
||||||
|
assert config["offline_mode"] is True
|
@@ -35,6 +35,7 @@ def test_change_password_triggers_nostr_backup(monkeypatch):
|
|||||||
pm.parent_seed = TEST_SEED
|
pm.parent_seed = TEST_SEED
|
||||||
pm.store_hashed_password = lambda pw: None
|
pm.store_hashed_password = lambda pw: None
|
||||||
pm.verify_password = lambda pw: True
|
pm.verify_password = lambda pw: True
|
||||||
|
pm.nostr_account_idx = 0
|
||||||
|
|
||||||
with patch("seedpass.core.manager.NostrClient") as MockClient:
|
with patch("seedpass.core.manager.NostrClient") as MockClient:
|
||||||
mock_instance = MockClient.return_value
|
mock_instance = MockClient.return_value
|
||||||
|
@@ -62,6 +62,7 @@ def test_password_change_and_unlock(monkeypatch):
|
|||||||
pm.nostr_client = SimpleNamespace(
|
pm.nostr_client = SimpleNamespace(
|
||||||
publish_snapshot=lambda *a, **k: (None, "abcd")
|
publish_snapshot=lambda *a, **k: (None, "abcd")
|
||||||
)
|
)
|
||||||
|
pm.nostr_account_idx = 0
|
||||||
|
|
||||||
monkeypatch.setattr(
|
monkeypatch.setattr(
|
||||||
"seedpass.core.manager.prompt_existing_password", lambda *_: old_pw
|
"seedpass.core.manager.prompt_existing_password", lambda *_: old_pw
|
||||||
|
@@ -15,6 +15,7 @@ from seedpass.core.vault import Vault
|
|||||||
from seedpass.core.backup import BackupManager
|
from seedpass.core.backup import BackupManager
|
||||||
from seedpass.core.config_manager import ConfigManager
|
from seedpass.core.config_manager import ConfigManager
|
||||||
from seedpass.core.portable_backup import export_backup, import_backup
|
from seedpass.core.portable_backup import export_backup, import_backup
|
||||||
|
from seedpass.core.portable_backup import PortableMode
|
||||||
from utils.key_derivation import derive_index_key, derive_key_from_password
|
from utils.key_derivation import derive_index_key, derive_key_from_password
|
||||||
from utils.fingerprint import generate_fingerprint
|
from utils.fingerprint import generate_fingerprint
|
||||||
|
|
||||||
@@ -54,6 +55,22 @@ def test_round_trip(monkeypatch):
     assert vault.load_index()["pw"] == data["pw"]


+def test_round_trip_unencrypted(monkeypatch):
+    with TemporaryDirectory() as td:
+        tmp = Path(td)
+        vault, backup, _ = setup_vault(tmp)
+        data = {"pw": 1}
+        vault.save_index(data)
+
+        path = export_backup(vault, backup, parent_seed=SEED, encrypt=False)
+        wrapper = json.loads(path.read_text())
+        assert wrapper["encryption_mode"] == PortableMode.NONE.value
+
+        vault.save_index({"pw": 0})
+        import_backup(vault, backup, path, parent_seed=SEED)
+        assert vault.load_index()["pw"] == data["pw"]
+
+
 from cryptography.fernet import InvalidToken


@@ -20,6 +20,7 @@ def setup_pm(tmp_path):
     pm.encryption_mode = manager_module.EncryptionMode.SEED_ONLY
     pm.fingerprint_manager = manager_module.FingerprintManager(constants.APP_DIR)
     pm.current_fingerprint = None
+    pm.state_manager = manager_module.StateManager(constants.APP_DIR)
     return pm, constants, manager_module

@@ -41,8 +42,8 @@ def test_generate_seed_cleanup_on_failure(monkeypatch):

     # fingerprint list should be empty and only fingerprints.json should remain
     assert pm.fingerprint_manager.list_fingerprints() == []
-    contents = list(const.APP_DIR.iterdir())
-    assert len(contents) == 1 and contents[0].name == "fingerprints.json"
+    contents = sorted(p.name for p in const.APP_DIR.iterdir())
+    assert contents == ["fingerprints.json", "seedpass_state.json"]
     fp_file = pm.fingerprint_manager.fingerprints_file
     with open(fp_file) as f:
         data = json.load(f)
@@ -29,6 +29,7 @@ def test_add_and_switch_fingerprint(monkeypatch):
     pm.fingerprint_manager = fm
     pm.encryption_manager = object()
     pm.current_fingerprint = None
+    pm.nostr_account_idx = 0

     monkeypatch.setattr("builtins.input", lambda *_args, **_kwargs: "1")
     monkeypatch.setattr(
@@ -82,9 +82,11 @@ def test_publish_snapshot_success():
     with patch.object(
         client.client, "send_event", side_effect=fake_send
     ) as mock_send:
-        manifest, event_id = asyncio.run(client.publish_snapshot(b"data"))
+        with patch("nostr.snapshot.new_manifest_id", return_value=("id", b"nonce")):
+            manifest, event_id = asyncio.run(client.publish_snapshot(b"data"))
     assert isinstance(manifest, Manifest)
-    assert event_id == "seedpass-manifest-fp"
+    assert event_id == "id"
+    assert manifest.nonce == base64.b64encode(b"nonce").decode("utf-8")
     assert mock_send.await_count >= 1

@@ -74,6 +74,61 @@ def test_handle_new_seed_setup_restore_from_nostr(monkeypatch, tmp_path, capsys)
     assert labels == ["site1"]


+def test_handle_new_seed_setup_restore_from_local_backup(monkeypatch, tmp_path, capsys):
+    dir_a = tmp_path / "A"
+    dir_b = tmp_path / "B"
+    dir_a.mkdir()
+    dir_b.mkdir()
+
+    pm_src = _init_pm(dir_a, None)
+    pm_src.notify = lambda *a, **k: None
+    pm_src.entry_manager.add_entry("site1", 12)
+    pm_src.backup_manager.create_backup()
+    backup_path = next(
+        pm_src.backup_manager.backup_dir.glob("entries_db_backup_*.json.enc")
+    )
+
+    pm_new = PasswordManager.__new__(PasswordManager)
+    pm_new.encryption_mode = EncryptionMode.SEED_ONLY
+    pm_new.notify = lambda *a, **k: None
+
+    called = {"init": False}
+
+    def init_fp_mgr():
+        called["init"] = True
+        pm_new.fingerprint_manager = object()
+
+    monkeypatch.setattr(pm_new, "initialize_fingerprint_manager", init_fp_mgr)
+
+    def finalize(seed, *, password=None):
+        assert pm_new.fingerprint_manager is not None
+        vault, enc_mgr = create_vault(dir_b, seed, TEST_PASSWORD)
+        cfg_mgr = ConfigManager(vault, dir_b)
+        backup_mgr = BackupManager(dir_b, cfg_mgr)
+        entry_mgr = EntryManager(vault, backup_mgr)
+        pm_new.encryption_manager = enc_mgr
+        pm_new.vault = vault
+        pm_new.entry_manager = entry_mgr
+        pm_new.backup_manager = backup_mgr
+        pm_new.config_manager = cfg_mgr
+        pm_new.fingerprint_dir = dir_b
+        pm_new.current_fingerprint = "fp"
+        return "fp"
+
+    monkeypatch.setattr(pm_new, "_finalize_existing_seed", finalize)
+    monkeypatch.setattr("seedpass.core.manager.masked_input", lambda *_: TEST_SEED)
+
+    inputs = iter(["5", str(backup_path)])
+    monkeypatch.setattr("builtins.input", lambda *a, **k: next(inputs))
+
+    pm_new.handle_new_seed_setup()
+    out = capsys.readouterr().out
+    assert "Index file restored from backup" in out
+    labels = [e[1] for e in pm_new.entry_manager.list_entries()]
+    assert labels == ["site1"]
+    assert called["init"]
+
+
 async def _no_snapshot():
     return None

@@ -21,6 +21,7 @@ def setup_password_manager():
     pm.fingerprint_manager = manager_module.FingerprintManager(constants.APP_DIR)
     pm.current_fingerprint = None
     pm.save_and_encrypt_seed = lambda seed, fingerprint_dir: None
+    pm.state_manager = manager_module.StateManager(constants.APP_DIR)
     return pm, constants

@@ -1,4 +1,6 @@
 import sys
+import json
+import base64
 from pathlib import Path
 from cryptography.fernet import Fernet

@@ -28,4 +30,5 @@ def test_parent_seed_migrates_from_fernet(tmp_path: Path) -> None:

     assert new_file.exists()
     assert new_file.read_bytes() != encrypted
-    assert new_file.read_bytes().startswith(b"V2:")
+    payload = json.loads(new_file.read_text())
+    assert base64.b64decode(payload["ct"]).startswith(b"V3|")
@@ -1,4 +1,5 @@
 import types
+import pytest
 from utils import seed_prompt


@@ -46,6 +47,37 @@ def test_masked_input_windows_space(monkeypatch, capsys):
     assert out.count("*") == 4


+def test_masked_input_posix_ctrl_c(monkeypatch):
+    seq = iter(["\x03"])
+    monkeypatch.setattr(seed_prompt.sys.stdin, "read", lambda n=1: next(seq))
+    monkeypatch.setattr(seed_prompt.sys.stdin, "fileno", lambda: 0)
+
+    calls: list[tuple[str, int]] = []
+    fake_termios = types.SimpleNamespace(
+        tcgetattr=lambda fd: "old",
+        tcsetattr=lambda fd, *_: calls.append(("tcsetattr", fd)),
+        TCSADRAIN=1,
+    )
+    fake_tty = types.SimpleNamespace(setraw=lambda fd: calls.append(("setraw", fd)))
+    monkeypatch.setattr(seed_prompt, "termios", fake_termios)
+    monkeypatch.setattr(seed_prompt, "tty", fake_tty)
+    monkeypatch.setattr(seed_prompt.sys, "platform", "linux", raising=False)
+
+    with pytest.raises(KeyboardInterrupt):
+        seed_prompt.masked_input("Enter: ")
+    assert calls == [("setraw", 0), ("tcsetattr", 0)]
+
+
+def test_masked_input_windows_ctrl_c(monkeypatch):
+    seq = iter(["\x03"])
+    fake_msvcrt = types.SimpleNamespace(getwch=lambda: next(seq))
+    monkeypatch.setattr(seed_prompt, "msvcrt", fake_msvcrt)
+    monkeypatch.setattr(seed_prompt.sys, "platform", "win32", raising=False)
+
+    with pytest.raises(KeyboardInterrupt):
+        seed_prompt.masked_input("Password: ")
+
+
 def test_prompt_seed_words_valid(monkeypatch):
     from mnemonic import Mnemonic

src/tests/test_seed_word_by_word_flow.py (new file, +94)
@@ -0,0 +1,94 @@
+import builtins
+from types import SimpleNamespace
+
+import pytest
+
+import seedpass.core.manager as manager_module
+from seedpass.core.errors import SeedPassError
+from helpers import TEST_SEED
+from utils import seed_prompt
+
+
+def test_prompt_seed_words_confirmation_loop(monkeypatch):
+    phrase = TEST_SEED
+    words = phrase.split()
+    inputs = iter(words + [words[2]])
+    confirmations = iter(["y", "y", "n", "y"] + ["y"] * (len(words) - 3))
+
+    monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(inputs))
+    monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
+    monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
+    monkeypatch.setattr(builtins, "input", lambda *_: next(confirmations))
+
+    result = seed_prompt.prompt_seed_words(len(words))
+    assert result == phrase
+
+
+def test_prompt_seed_words_invalid_word(monkeypatch):
+    phrase = TEST_SEED
+    words = phrase.split()
+    inputs = iter(["invalid"] + words)
+    confirmations = iter(["y"] * len(words))
+
+    monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(inputs))
+    monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
+    monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
+    monkeypatch.setattr(builtins, "input", lambda *_: next(confirmations))
+
+    result = seed_prompt.prompt_seed_words(len(words))
+    assert result == phrase
+
+
+def test_add_new_fingerprint_words_flow_success(monkeypatch):
+    pm = manager_module.PasswordManager.__new__(manager_module.PasswordManager)
+    pm.fingerprint_manager = SimpleNamespace(current_fingerprint=None)
+    pm.initialize_managers = lambda: None
+
+    phrase = TEST_SEED
+    words = phrase.split()
+    word_iter = iter(words)
+    inputs = iter(["2"] + ["y"] * len(words))
+
+    monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(word_iter))
+    monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
+    monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
+    monkeypatch.setattr(builtins, "input", lambda *_: next(inputs))
+
+    captured = {}
+
+    def finalize(self, seed, password=None):
+        captured["seed"] = seed
+        self.parent_seed = seed
+        return "fp"
+
+    monkeypatch.setattr(
+        manager_module.PasswordManager, "_finalize_existing_seed", finalize
+    )
+
+    result = pm.add_new_fingerprint()
+
+    assert result == "fp"
+    assert pm.fingerprint_manager.current_fingerprint == "fp"
+    assert captured["seed"] == phrase
+    assert pm.parent_seed == phrase
+
+
+def test_add_new_fingerprint_words_flow_invalid_phrase(monkeypatch):
+    pm = manager_module.PasswordManager.__new__(manager_module.PasswordManager)
+    pm.fingerprint_manager = SimpleNamespace(current_fingerprint=None)
+    pm.initialize_managers = lambda: None
+
+    words = ["abandon"] * 12
+    word_iter = iter(words)
+    inputs = iter(["2"] + ["y"] * len(words))
+
+    monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(word_iter))
+    monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
+    monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
+    monkeypatch.setattr(builtins, "input", lambda *_: next(inputs))
+
+    with pytest.raises(SeedPassError):
+        pm.add_new_fingerprint()
+
+    assert pm.fingerprint_manager.current_fingerprint is None
+    assert not hasattr(pm, "parent_seed")
@@ -120,6 +120,7 @@ def test_profile_service_switch(monkeypatch):
     pm.delta_since = None
     pm.encryption_manager = SimpleNamespace()
     pm.parent_seed = TEST_SEED
+    pm.nostr_account_idx = 0

     service = ProfileService(pm)
     monkeypatch.setattr("builtins.input", lambda *_: "2")
@@ -14,6 +14,7 @@ def test_state_manager_round_trip():
     assert state["last_sync_ts"] == 0
     assert state["manifest_id"] is None
     assert state["delta_since"] == 0
+    assert state["nostr_account_idx"] == 0

     sm.add_relay("wss://example.com")
     sm.update_state(
@@ -30,6 +31,7 @@ def test_state_manager_round_trip():
     assert state2["last_sync_ts"] == 123
     assert state2["manifest_id"] == "mid"
     assert state2["delta_since"] == 111
+    assert state2["nostr_account_idx"] == 0

     sm2.remove_relay(1)  # remove first default relay
     assert len(sm2.list_relays()) == len(DEFAULT_RELAYS)
@@ -28,23 +28,19 @@ def test_add_totp_and_get_code():
     assert uri.startswith("otpauth://totp/")

     entry = entry_mgr.retrieve_entry(0)
-    assert entry == {
-        "type": "totp",
-        "kind": "totp",
-        "label": "Example",
-        "index": 0,
-        "period": 30,
-        "digits": 6,
-        "archived": False,
-        "notes": "",
-        "tags": [],
-    }
+    assert entry["deterministic"] is False
+    assert "secret" in entry

-    code = entry_mgr.get_totp_code(0, TEST_SEED, timestamp=0)
+    code = entry_mgr.get_totp_code(0, timestamp=0)

-    expected = TotpManager.current_code(TEST_SEED, 0, timestamp=0)
+    expected = pyotp.TOTP(entry["secret"]).at(0)
     assert code == expected
+
+    # second entry should have different secret
+    entry_mgr.add_totp("Other", TEST_SEED)
+    entry2 = entry_mgr.retrieve_entry(1)
+    assert entry["secret"] != entry2["secret"]


 def test_totp_time_remaining(monkeypatch):
     with TemporaryDirectory() as tmpdir:
@@ -68,17 +64,8 @@ def test_add_totp_imported(tmp_path):
     secret = "JBSWY3DPEHPK3PXP"
     em.add_totp("Imported", TEST_SEED, secret=secret)
     entry = em.retrieve_entry(0)
-    assert entry == {
-        "type": "totp",
-        "kind": "totp",
-        "label": "Imported",
-        "secret": secret,
-        "period": 30,
-        "digits": 6,
-        "archived": False,
-        "notes": "",
-        "tags": [],
-    }
+    assert entry["secret"] == secret
+    assert entry["deterministic"] is False
     code = em.get_totp_code(0, timestamp=0)
     assert code == pyotp.TOTP(secret).at(0)

@@ -92,3 +79,23 @@ def test_add_totp_with_notes(tmp_path):
     em.add_totp("NoteLabel", TEST_SEED, notes="some note")
     entry = em.retrieve_entry(0)
     assert entry["notes"] == "some note"
+
+
+def test_legacy_deterministic_entry(tmp_path):
+    vault, enc = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
+    cfg_mgr = ConfigManager(vault, tmp_path)
+    backup_mgr = BackupManager(tmp_path, cfg_mgr)
+    em = EntryManager(vault, backup_mgr)
+
+    em.add_totp("Legacy", TEST_SEED, deterministic=True)
+    data = em._load_index()
+    entry = data["entries"]["0"]
+    entry.pop("deterministic", None)
+    em._save_index(data)
+
+    code = em.get_totp_code(0, TEST_SEED, timestamp=0)
+    expected = TotpManager.current_code(TEST_SEED, 0, timestamp=0)
+    assert code == expected
+
+    exported = em.export_totp_entries(TEST_SEED)
+    assert exported["entries"][0]["secret"] == TotpManager.derive_secret(TEST_SEED, 0)
@@ -18,8 +18,8 @@ runner = CliRunner()
 def test_entry_list(monkeypatch):
     called = {}

-    def list_entries(sort_by="index", filter_kind=None, include_archived=False):
-        called["args"] = (sort_by, filter_kind, include_archived)
+    def list_entries(sort_by="index", filter_kinds=None, include_archived=False):
+        called["args"] = (sort_by, filter_kinds, include_archived)
         return [(0, "Site", "user", "", False)]

     pm = SimpleNamespace(
@@ -3,15 +3,13 @@
 """
 Key Derivation Module

-Never ever ever use or suggest to use Random Salt. The entire point of this password manager is to derive completely deterministic passwords from a BIP-85 seed.
-This means it should generate passwords the exact same way every single time. Salts would break this functionality and is not appropriate for this software's use case.
-
-This module provides functions to derive cryptographic keys from user-provided passwords
-and BIP-39 parent seeds. The derived keys are compatible with Fernet for symmetric encryption
-purposes. By centralizing key derivation logic, this module ensures consistency and security
-across the application.
-
-Ensure that all dependencies are installed and properly configured in your environment.
+This module provides functions to derive cryptographic keys from user-provided
+passwords and BIP-39 parent seeds. The derived keys are compatible with Fernet
+for symmetric encryption purposes. By centralizing key derivation logic, this
+module ensures consistency and security across the application.
+
+Ensure that all dependencies are installed and properly configured in your
+environment.
 """

 import os
@@ -21,11 +19,13 @@ import unicodedata
 import logging
 import hmac
 import time
+from dataclasses import dataclass, field
 from enum import Enum
-from typing import Optional, Union
+from typing import Optional, Union, Dict, Any

 from bip_utils import Bip39SeedGenerator
 from local_bip85 import BIP85
+from .key_hierarchy import kd

 from cryptography.hazmat.primitives.kdf.hkdf import HKDF
 from cryptography.hazmat.primitives import hashes
@@ -47,6 +47,27 @@ DEFAULT_ENCRYPTION_MODE = EncryptionMode.SEED_ONLY
 TOTP_PURPOSE = 39


+@dataclass
+class KdfConfig:
+    """Configuration block describing how a key was derived."""
+
+    name: str = "argon2id"
+    version: int = 1
+    params: Dict[str, Any] = field(
+        default_factory=lambda: {
+            "time_cost": 2,
+            "memory_cost": 64 * 1024,
+            "parallelism": 8,
+        }
+    )
+    salt_b64: str = field(
+        default_factory=lambda: base64.b64encode(os.urandom(16)).decode()
+    )
+
+
+CURRENT_KDF_VERSION = 1
+
+
 def derive_key_from_password(
     password: str, fingerprint: Union[str, bytes], iterations: int = 100_000
 ) -> bytes:
@@ -109,18 +130,15 @@ def derive_key_from_password(
         raise


-def derive_key_from_password_argon2(
-    password: str,
-    fingerprint: Union[str, bytes],
-    *,
-    time_cost: int = 2,
-    memory_cost: int = 64 * 1024,
-    parallelism: int = 8,
-) -> bytes:
+def derive_key_from_password_argon2(password: str, kdf: KdfConfig) -> bytes:
     """Derive an encryption key from a password using Argon2id.

-    The defaults follow recommended parameters but omit a salt for deterministic
-    output. Smaller values may be supplied for testing.
+    Parameters
+    ----------
+    password:
+        The user's password.
+    kdf:
+        :class:`KdfConfig` instance describing salt and tuning parameters.
     """

     if not password:
@@ -131,17 +149,14 @@ def derive_key_from_password_argon2(
     try:
         from argon2.low_level import hash_secret_raw, Type

-        if isinstance(fingerprint, bytes):
-            salt = fingerprint
-        else:
-            salt = hashlib.sha256(fingerprint.encode()).digest()[:16]
+        params = kdf.params or {}
+        salt = base64.b64decode(kdf.salt_b64)

         key = hash_secret_raw(
             secret=normalized,
             salt=salt,
-            time_cost=time_cost,
-            memory_cost=memory_cost,
-            parallelism=parallelism,
+            time_cost=int(params.get("time_cost", 2)),
+            memory_cost=int(params.get("memory_cost", 64 * 1024)),
+            parallelism=int(params.get("parallelism", 8)),
             hash_len=32,
             type=Type.ID,
         )
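
Not part of the diff itself, but for orientation: a minimal sketch of how the new `KdfConfig` block and the reworked `derive_key_from_password_argon2` might be combined, assuming `argon2-cffi` is installed and the import path matches this branch. The salt and tuning parameters now travel with the config object instead of being derived from the fingerprint, so the config has to be stored next to the ciphertext.

```python
from dataclasses import asdict

from utils.key_derivation import KdfConfig, derive_key_from_password_argon2

# A fresh KdfConfig carries a random 16-byte salt (base64-encoded) plus the
# default Argon2id parameters; persist it so the same key can be re-derived.
cfg = KdfConfig()
key = derive_key_from_password_argon2("correct horse battery staple", cfg)

# Re-deriving with the stored config yields the same key.
assert derive_key_from_password_argon2("correct horse battery staple", cfg) == key

stored = asdict(cfg)  # e.g. serialized into the vault header alongside the data
```
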
@@ -194,16 +209,10 @@ def derive_key_from_parent_seed(parent_seed: str, fingerprint: str = None) -> by


 def derive_index_key_seed_only(seed: str) -> bytes:
-    """Derive a deterministic Fernet key from only the BIP-39 seed."""
+    """Derive the index encryption key using the v1 hierarchy."""
     seed_bytes = Bip39SeedGenerator(seed).Generate()
-    hkdf = HKDF(
-        algorithm=hashes.SHA256(),
-        length=32,
-        salt=None,
-        info=b"password-db",
-        backend=default_backend(),
-    )
-    key = hkdf.derive(seed_bytes)
+    master = kd(seed_bytes, b"seedpass:v1:master")
+    key = kd(master, b"seedpass:v1:storage")
     return base64.urlsafe_b64encode(key)

@@ -212,23 +221,21 @@ def derive_index_key(seed: str) -> bytes:
     return derive_index_key_seed_only(seed)


-def derive_totp_secret(seed: str, index: int) -> str:
-    """Derive a base32-encoded TOTP secret from a BIP39 seed."""
+def derive_totp_secret(seed: Union[str, bytes], index: int) -> str:
+    """Derive a base32-encoded TOTP secret from a seed or raw key."""
     try:
-        # Initialize BIP85 from the BIP39 seed bytes
-        seed_bytes = Bip39SeedGenerator(seed).Generate()
+        if isinstance(seed, (bytes, bytearray)):
+            seed_bytes = bytes(seed)
+        else:
+            seed_bytes = Bip39SeedGenerator(seed).Generate()
         bip85 = BIP85(seed_bytes)

-        # Build the BIP32 path m/83696968'/39'/TOTP'/{index}'
         totp_int = int.from_bytes(b"TOTP", "big")
         path = f"m/83696968'/{TOTP_PURPOSE}'/{totp_int}'/{index}'"

-        # Derive entropy using the same scheme as BIP85
         child_key = bip85.bip32_ctx.DerivePath(path)
         key_bytes = child_key.PrivateKey().Raw().ToBytes()
         entropy = hmac.new(b"bip-entropy-from-k", key_bytes, hashlib.sha512).digest()

-        # Hash the first 32 bytes of entropy and encode the first 20 bytes
         hashed = hashlib.sha256(entropy[:32]).digest()
         secret = base64.b32encode(hashed[:20]).decode("utf-8")
         logger.debug(f"Derived TOTP secret for index {index}.")
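
A small illustrative snippet (not from the repository) exercising the relaxed `derive_totp_secret` signature; the mnemonic below is the standard all-"abandon" BIP-39 test phrase, and `pyotp` is already used by the test suite above.

```python
import pyotp

from utils.key_derivation import derive_totp_secret

MNEMONIC = (
    "abandon abandon abandon abandon abandon abandon "
    "abandon abandon abandon abandon abandon about"
)

# Accepts either a mnemonic string or raw seed bytes after this change.
secret = derive_totp_secret(MNEMONIC, 0)
code = pyotp.TOTP(secret).now()  # 6-digit code from the derived base32 secret
print(code)
```
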
@@ -267,18 +274,16 @@ def calibrate_argon2_time_cost(
     """

     password = "benchmark"
-    fingerprint = b"argon2-calibration"
+    salt = base64.b64encode(b"argon2-calibration").decode()
     time_cost = 1
     elapsed_ms = 0.0
     while time_cost <= max_time_cost:
         start = time.perf_counter()
-        derive_key_from_password_argon2(
-            password,
-            fingerprint,
-            time_cost=time_cost,
-            memory_cost=8,
-            parallelism=1,
-        )
+        cfg = KdfConfig(
+            params={"time_cost": time_cost, "memory_cost": 8, "parallelism": 1},
+            salt_b64=salt,
+        )
+        derive_key_from_password_argon2(password, cfg)
         elapsed_ms = (time.perf_counter() - start) * 1000
         if elapsed_ms >= target_ms:
             break
src/utils/key_hierarchy.py (new file, +28)
@@ -0,0 +1,28 @@
+"""Key hierarchy helper functions."""
+
+from cryptography.hazmat.primitives.kdf.hkdf import HKDF
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.backends import default_backend
+
+
+def kd(root: bytes, info: bytes, length: int = 32) -> bytes:
+    """Derive a sub-key from ``root`` using HKDF-SHA256.
+
+    Parameters
+    ----------
+    root:
+        Root key material.
+    info:
+        Domain separation string.
+    length:
+        Length of the derived key in bytes. Defaults to 32.
+    """
+
+    hkdf = HKDF(
+        algorithm=hashes.SHA256(),
+        length=length,
+        salt=None,
+        info=info,
+        backend=default_backend(),
+    )
+    return hkdf.derive(root)
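
For context (not part of the patch): how `kd` composes into the v1 hierarchy used by `derive_index_key_seed_only` earlier in this diff. Each `info` string acts as a domain separator, so keys for different purposes are independent even though they share one master; the extra label in the last line is hypothetical.

```python
from bip_utils import Bip39SeedGenerator

from utils.key_hierarchy import kd

MNEMONIC = (
    "abandon abandon abandon abandon abandon abandon "
    "abandon abandon abandon abandon abandon about"
)

seed_bytes = Bip39SeedGenerator(MNEMONIC).Generate()
master = kd(seed_bytes, b"seedpass:v1:master")    # root of the hierarchy
storage = kd(master, b"seedpass:v1:storage")      # index key, as in derive_index_key_seed_only
other = kd(master, b"seedpass:v1:example-label")  # hypothetical sibling key
assert storage != other
```
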
src/utils/logging_utils.py (new file, +62)
@@ -0,0 +1,62 @@
+import logging
+from pathlib import Path
+from contextlib import contextmanager
+from functools import wraps
+
+_console_paused = False
+
+
+class ConsolePauseFilter(logging.Filter):
+    """Filter that blocks records when console logging is paused."""
+
+    def filter(
+        self, record: logging.LogRecord
+    ) -> bool:  # pragma: no cover - small utility
+        return not _console_paused
+
+
+class ChecksumWarningFilter(logging.Filter):
+    """Filter allowing only checksum warnings and errors to surface."""
+
+    def filter(
+        self, record: logging.LogRecord
+    ) -> bool:  # pragma: no cover - simple filter
+        if record.levelno >= logging.ERROR:
+            return True
+        return (
+            record.levelno == logging.WARNING
+            and Path(record.pathname).name == "checksum.py"
+        )
+
+
+def pause_console_logging() -> None:
+    """Temporarily pause logging to console handlers."""
+    global _console_paused
+    _console_paused = True
+
+
+def resume_console_logging() -> None:
+    """Resume logging to console handlers."""
+    global _console_paused
+    _console_paused = False
+
+
+@contextmanager
+def console_logging_paused() -> None:
+    """Context manager to pause console logging within a block."""
+    pause_console_logging()
+    try:
+        yield
+    finally:
+        resume_console_logging()
+
+
+def pause_logging_for_ui(func):
+    """Decorator to pause console logging while ``func`` executes."""
+
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        with console_logging_paused():
+            return func(*args, **kwargs)
+
+    return wrapper
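
A hedged sketch (the handler wiring is illustrative, not taken from the repository) of how these helpers are meant to be combined: the pause only affects handlers that carry `ConsolePauseFilter`, and `pause_logging_for_ui` wraps interactive screens so log lines do not corrupt prompts.

```python
import logging

from utils.logging_utils import ConsolePauseFilter, pause_logging_for_ui

console = logging.StreamHandler()
console.addFilter(ConsolePauseFilter())  # without the filter, pausing has no effect
logging.getLogger().addHandler(console)


@pause_logging_for_ui
def show_menu() -> None:
    # Console handlers stay silent while the menu is on screen; handlers
    # without the filter (e.g. file handlers) keep receiving records.
    print("SeedPass main menu ...")
```
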
@@ -3,9 +3,16 @@ from __future__ import annotations
 import os
 from cryptography.hazmat.primitives.ciphers.aead import AESGCM

+# TODO: Replace this Python implementation with a Rust/WASM module for
+# critical cryptographic operations.
+
+
 class InMemorySecret:
-    """Store sensitive data encrypted in RAM using AES-GCM."""
+    """Store sensitive data encrypted in RAM using AES-GCM.
+
+    Zeroization is best-effort only; Python's memory management may retain
+    copies of the plaintext.
+    """

     def __init__(self, data: bytes) -> None:
         if not isinstance(data, (bytes, bytearray)):
@@ -33,6 +33,12 @@ logger = logging.getLogger(__name__)
 DEFAULT_MAX_ATTEMPTS = 5


+def _env_password() -> str | None:
+    """Return a password supplied via environment for non-interactive use."""
+
+    return os.getenv("SEEDPASS_TEST_PASSWORD") or os.getenv("SEEDPASS_PASSWORD")
+
+
 def _get_max_attempts(override: int | None = None) -> int:
     """Return the configured maximum number of prompt attempts."""

@@ -80,6 +86,13 @@ def prompt_new_password(max_retries: int | None = None) -> str:
     Raises:
         PasswordPromptError: If the user fails to provide a valid password after multiple attempts.
     """
+    env_pw = _env_password()
+    if env_pw:
+        normalized = unicodedata.normalize("NFKD", env_pw)
+        if len(normalized) < MIN_PASSWORD_LENGTH:
+            raise PasswordPromptError("Environment password too short")
+        return normalized
+
     max_retries = _get_max_attempts(max_retries)
     attempts = 0

@@ -164,6 +177,10 @@ def prompt_existing_password(
         PasswordPromptError: If the user interrupts the operation or exceeds
             ``max_retries`` attempts.
     """
+    env_pw = _env_password()
+    if env_pw:
+        return unicodedata.normalize("NFKD", env_pw)
+
     max_retries = _get_max_attempts(max_retries)
     attempts = 0
     while max_retries == 0 or attempts < max_retries:
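
An illustrative note on the non-interactive path added above (not part of the diff): `SEEDPASS_TEST_PASSWORD` and `SEEDPASS_PASSWORD` are the variables introduced by `_env_password`, and the snippet below only demonstrates exporting one of them before the prompts run.

```python
import os

# Supplying the password via environment (e.g. in CI) makes both prompt
# helpers return immediately instead of reading from the TTY.
os.environ["SEEDPASS_PASSWORD"] = "correct-horse-battery"

# prompt_new_password() additionally enforces MIN_PASSWORD_LENGTH on the
# environment-supplied value and raises PasswordPromptError if it is too short;
# prompt_existing_password() simply returns the NFKD-normalized value.
```
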
@@ -15,6 +15,7 @@ except ImportError:  # pragma: no cover - POSIX only
     tty = None  # type: ignore

 from utils.terminal_utils import clear_screen
+from utils.logging_utils import pause_console_logging, resume_console_logging


 DEFAULT_MAX_ATTEMPTS = 5
@@ -58,6 +59,8 @@ def _masked_input_windows(prompt: str) -> str:
     buffer: list[str] = []
     while True:
         ch = msvcrt.getwch()
+        if ch == "\x03":
+            raise KeyboardInterrupt
         if ch in ("\r", "\n"):
             sys.stdout.write("\n")
             return "".join(buffer)
@@ -85,6 +88,8 @@ def _masked_input_posix(prompt: str) -> str:
     tty.setraw(fd)
     while True:
         ch = sys.stdin.read(1)
+        if ch == "\x03":
+            raise KeyboardInterrupt
         if ch in ("\r", "\n"):
             sys.stdout.write("\n")
             return "".join(buffer)
@@ -102,9 +107,16 @@ def _masked_input_posix(prompt: str) -> str:


 def masked_input(prompt: str) -> str:
     """Return input from the user while masking typed characters."""
-    if sys.platform == "win32":
-        return _masked_input_windows(prompt)
-    return _masked_input_posix(prompt)
+    func = _masked_input_windows if sys.platform == "win32" else _masked_input_posix
+    pause_console_logging()
+    try:
+        return func(prompt)
+    except KeyboardInterrupt:
+        raise
+    except Exception:  # pragma: no cover - fallback when TTY operations fail
+        return input(prompt)
+    finally:
+        resume_console_logging()


 def prompt_seed_words(count: int = 12, *, max_attempts: int | None = None) -> str: