Mirror of https://github.com/PR0M3TH3AN/SeedPass.git — synced 2025-09-08 07:18:47 +00:00

Compare commits: alpha ... cfb861b60a (66 commits)

Commits:
cfb861b60a, ca533a3518, a7da9b8971, a0ae414765, 45c112b26b, 4df6ff639e,
108fcfcb04, 505cf1a950, e701a1c1cb, cb9a068e40, c13742f3f3, 6c8b1928b8,
b1b31eeb8a, 492bfba3fb, b33565e7f3, 857b1ef0f9, 7a039171a0, dd513cf964,
16de0a82c7, d99af30d9f, da37ec2e61, 0315562d80, e75e197270, 619226d336,
15df3f10a6, b451097c65, 9cacd1b13d, b97d60778b, bbb26ca55a, d6e03d5e7a,
26632c0e70, 06ca51993a, 1b6b0ab5c5, 87999b1888, 6928b4ddbf, 73183d53a5,
c9ad16f150, bd86bdbb3a, 8d5374ef5b, 468608a369, 56e652089a, c353c04472,
2559920a14, 57935bdfc1, 55fdee522c, af4eb72385, 90c304ff6e, 7b1ef2abe2,
5194adf145, 8f74ac27f4, 1232630dba, 62983df69c, b4238791aa, d1fccbc4f2,
50532597b8, bb733bb194, 785acf938c, 4973095a5c, 69f1619816, e1b821bc55,
a21efa91db, 5109f96ce7, 19577163cf, b0e4ab9bc6, 3ff3e4e1d6, 08c4453326
AGENTS.md (59 lines changed)

@@ -2,6 +2,60 @@

This project is written in **Python**. Follow these instructions when working with the code base.

## Installation Quickstart for AI Agents

### Prerequisites

Ensure the system has the required build tools and Python headers. Examples:

```bash
# Ubuntu/Debian
sudo apt update && sudo apt install -y \
    build-essential \
    libffi-dev \
    pkg-config \
    python3.11-dev \
    curl \
    git

# CentOS/RHEL
sudo yum install -y gcc gcc-c++ libffi-devel pkgconfig python3-devel curl git

# macOS
brew install python@3.11 libffi pkg-config git
```

### Installation

Run the installer script to fetch the latest release:

```bash
# Stable release
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)"

# Beta branch
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ -b beta
```

### Environment Layout

- Virtual environment: `~/.seedpass/app/venv/`
- Entry point: `~/.seedpass/app/src/main.py`

### Verification

```bash
cd ~/.seedpass/app && source venv/bin/activate
cd src && python main.py --version  # Expected: SeedPass v[version]
```

### Running SeedPass

```bash
cd ~/.seedpass/app && source venv/bin/activate
cd src && python main.py
```

## Running Tests

1. Set up a virtual environment and install dependencies:

@@ -39,6 +93,11 @@ This project is written in **Python**. Follow these instructions when working wi

Following these practices helps keep the code base consistent and secure.

## Deterministic Artifact Generation

- All generated artifacts (passwords, keys, TOTP secrets, etc.) must be fully deterministic across runs and platforms (see the sketch below).
- Randomness is only permitted for security primitives (e.g., encryption nonces, in-memory keys) and must never influence derived artifacts.
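A minimal sketch of what "deterministic" means in practice is shown below. The function and context-string names are illustrative, not the actual SeedPass API; the point is that the same seed, purpose, and index always yield the same bytes, while randomness stays confined to primitives such as nonces.

```python
import hashlib
import hmac
import os

def derive_artifact_bytes(parent_seed: bytes, purpose: str, index: int, length: int = 32) -> bytes:
    """Deterministically derive artifact bytes from (seed, purpose, index)."""
    # Domain-separate by purpose, then by index; identical inputs always give identical output.
    key = hmac.new(parent_seed, f"seedpass:{purpose}".encode(), hashlib.sha512).digest()
    out = hmac.new(key, index.to_bytes(4, "big"), hashlib.sha512).digest()
    return out[:length]

seed = bytes.fromhex("00" * 64)                         # placeholder 512-bit seed
pw_bytes = derive_artifact_bytes(seed, "password", 0)   # reproducible on every run and platform
nonce = os.urandom(16)                                  # allowed randomness: never feeds derived artifacts
```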
## Legacy Index Migration

- Always provide a migration path for index archives and import/export routines.
README.md (35 lines changed)

@@ -16,6 +16,10 @@ This software was not developed by an experienced security expert and should be

Recent releases derive passwords and other artifacts using a fully deterministic algorithm that behaves consistently across Python versions. This improvement means artifacts generated with earlier versions of SeedPass will not match those produced now. Regenerate any previously derived data or retain the old version if you need to reproduce older passwords or keys.

**⚠️ First Run Warning**

Use a dedicated BIP-39 seed phrase exclusively for SeedPass. Offline Mode is **ON by default**, keeping all Nostr syncing disabled until you explicitly opt in.

---
### Supported OS

@@ -119,9 +123,9 @@ See `docs/ARCHITECTURE.md` and [Nostr Setup](docs/nostr_setup.md) for details.
### Quick Installer

Use the automated installer to download SeedPass and its dependencies in one step.
The scripts can also install the BeeWare backend for your platform when requested (use `-IncludeGui` on Windows).
The scripts can also install the BeeWare backend for your platform when requested (`--mode gui` or `--mode both` on Linux/macOS, `-IncludeGui` on Windows).
If the GTK `gi` bindings are missing, the installer attempts to install the
necessary system packages using `apt`, `yum`, `pacman`, or Homebrew.
necessary system packages using `apt`, `yum`, `pacman`, or Homebrew. When no display server is detected, GUI components are skipped automatically.

**Linux and macOS:**
```bash
@@ -132,6 +136,10 @@ bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ -b beta
```
Make sure the command ends right after `-b beta` with **no trailing parenthesis**.
*Install with GUI support:*
```bash
bash -c "$(curl -sSL https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.sh)" _ --mode gui
```

**Windows (PowerShell):**
```powershell
@@ -146,6 +154,10 @@ The Windows installer will attempt to install Git automatically if it is not alr

**Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.

#### Installer Dependency Checks

The installer verifies that core build tooling—C/C++ build tools, Rust, CMake, and the imaging/GTK libraries—are available before completing. Use `--mode gui` to install only the graphical interface or `--mode both` to install both interfaces (default: `tui`). On Linux, ensure `xclip` or `wl-clipboard` is installed for clipboard support.

#### Windows Nostr Sync Troubleshooting

When backing up or restoring from Nostr on Windows, a few issues are common:

@@ -274,10 +286,10 @@ You can then launch SeedPass and create a backup:
seedpass

# Export your index
seedpass export --file "~/seedpass_backup.json"
seedpass vault export --file "~/seedpass_backup.json"

# Later you can restore it
seedpass import --file "~/seedpass_backup.json"
seedpass vault import --file "~/seedpass_backup.json"

# Quickly find or retrieve entries
seedpass search "github"

@@ -431,6 +443,16 @@ For a full list of commands see [docs/advanced_cli.md](docs/advanced_cli.md). Th
```
*(or `python src/main.py` when running directly from the repository)*

To restore a previously backed up index at launch, provide the backup path
and fingerprint:

```bash
seedpass --restore-backup /path/to/backup.json.enc --fingerprint <fp>
```

Without the flag, the startup prompt offers a **Restore from backup** option
before the vault is initialized.

2. **Follow the Prompts:**

   - **Seed Profile Selection:** If you have existing seed profiles, you'll be prompted to select one or add a new one.

@@ -616,6 +638,10 @@ initial setup. You must provide both your 12‑word master seed and the master
password that encrypted the vault; without the correct password the retrieved
data cannot be decrypted.

Alternatively, a local backup file can be loaded at startup. Launch the
application with `--restore-backup <file> --fingerprint <fp>` or choose the
**Restore from backup** option presented before the vault initializes.

1. Start SeedPass and choose option **4** when prompted to set up a seed.
2. Paste your BIP‑85 seed phrase when asked.
3. Enter the master password associated with that seed.

@@ -767,6 +793,7 @@ You can also launch the GUI directly with `seedpass gui` or `seedpass-gui`.
- **No PBKDF2 Salt Needed:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
- **Checksum Verification:** Always verify the script's checksum to ensure its integrity and protect against unauthorized modifications.
- **Potential Bugs and Limitations:** Be aware that the software may contain bugs and lacks certain features. Snapshot chunks are capped at 50 KB and the client rotates snapshots after enough delta events accumulate. The security of memory management and logs has not been thoroughly evaluated and may pose risks of leaking sensitive information.
- **Best-Effort Memory Zeroization:** Sensitive data is wiped from memory when possible, but Python may retain copies of decrypted values.
- **Multiple Seeds Management:** While managing multiple seeds adds flexibility, it also increases the responsibility to secure each seed and its associated password.
- **No PBKDF2 Salt Required:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
- **Default KDF Iterations:** New profiles start with 50,000 PBKDF2 iterations. Adjust this with `seedpass config set kdf_iterations`.
docs/SPEC.md (new file, 44 lines)

@@ -0,0 +1,44 @@

# SeedPass Specification

## Key Hierarchy

SeedPass derives a hierarchy of keys from a single BIP-39 parent seed using HKDF:

- **Master Key** – `HKDF(seed, "seedpass:v1:master")`
- **KEY_STORAGE** – used to encrypt vault data.
- **KEY_INDEX** – protects the metadata index.
- **KEY_PW_DERIVE** – deterministic password generation.
- **KEY_TOTP_DET** – deterministic TOTP secrets.

Each context string keeps derived keys domain separated.
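A minimal sketch of this derivation, assuming HKDF-SHA256 from the `cryptography` package; only the `seedpass:v1:master` context string appears in the spec above, so the other context strings below are assumptions:

```python
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

def derive_key(parent_seed: bytes, context: str, length: int = 32) -> bytes:
    """Derive a child key bound to a context string (domain separation)."""
    return HKDF(
        algorithm=hashes.SHA256(),
        length=length,
        salt=None,
        info=context.encode(),
    ).derive(parent_seed)

parent_seed = bytes.fromhex("00" * 64)                       # placeholder 512-bit BIP-39 seed
master_key = derive_key(parent_seed, "seedpass:v1:master")   # context string from the spec
key_totp_det = derive_key(parent_seed, "seedpass:v1:totp")   # assumed context string
```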
## KDF Parameters

Passwords are protected with **PBKDF2-HMAC-SHA256**. The default work factor is
**50,000 iterations** but may be adjusted via the settings slider. The config
stores a `KdfConfig` structure with the chosen iteration count, algorithm name,
and the current spec version (`CURRENT_KDF_VERSION = 1`). Argon2 is available
with a default `time_cost` of 2 when selected.
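As a rough illustration of the primitive (not SeedPass's exact salt or key-handling scheme), the standard library exposes PBKDF2-HMAC-SHA256 directly:

```python
import hashlib

iterations = 50_000  # default work factor; higher values are selectable via the settings slider
derived_key = hashlib.pbkdf2_hmac(
    "sha256",
    b"master password",                  # illustrative input
    b"profile-specific salt material",   # illustrative; see the README's note on salts
    iterations,
    dklen=32,
)
print(derived_key.hex())
```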
## Message Formats

SeedPass synchronizes profiles over Nostr using three event kinds:

- **Manifest (`30070`)** – high level snapshot description and current version.
- **Snapshot Chunk (`30071`)** – compressed, encrypted portions of the vault.
- **Delta (`30072`)** – incremental changes since the last snapshot.

Events encode JSON and include tags for checksums, fingerprints, and timestamps.
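For example, a manifest event's JSON content might look like the following. The top-level field names mirror the `Manifest` dataclass shown later in this diff (`ver`, `algo`, `chunks`, `delta_since`, `nonce`); the chunk metadata fields and the `algo` label here are assumptions:

```python
import json

manifest_content = json.dumps({
    "ver": 1,
    "algo": "gzip+encrypt",                                        # assumed algorithm label
    "chunks": [
        {"id": "chunk-0", "hash": "deadbeef", "event_id": None},   # assumed chunk metadata fields
    ],
    "delta_since": None,
    "nonce": "bT5mC0d3ZA==",                                       # base64 nonce stored in the manifest
})
# The string above would be published as the content of a kind-30070 replaceable event.
```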
## Versioning

Configuration and KDF schemas are versioned so clients can migrate older
profiles. Nostr events carry a version field in the manifest, and the software
follows semantic versioning for releases.

## Memory Protection

SeedPass encrypts sensitive values in memory and attempts to wipe them when no
longer needed. This zeroization is best-effort only; Python's memory management
may retain copies of decrypted data. Critical cryptographic operations may move
to a Rust/WASM module in the future to provide stronger guarantees.
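A best-effort wipe in Python usually means overwriting a mutable buffer in place, as in this sketch; it mitigates, but cannot guarantee, removal of every copy, since CPython may keep intermediate objects alive:

```python
import hashlib

secret = bytearray(b"decrypted value")           # keep secrets in mutable buffers when possible
try:
    digest = hashlib.sha256(secret).hexdigest()  # stand-in for real work on the secret
finally:
    for i in range(len(secret)):
        secret[i] = 0                            # overwrite the buffer once it is no longer needed
```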
@@ -127,7 +127,7 @@ Run or stop the local HTTP API.

| Action | Command | Examples |
| :--- | :--- | :--- |
| Start the API | `api start` | `seedpass api start --host 0.0.0.0 --port 8000` |
| Stop the API | `api stop` | `seedpass api stop` |
| Stop the API | `api stop --token TOKEN` | `seedpass api stop --token <token>` |

---

@@ -214,7 +214,7 @@ Set the `SEEDPASS_CORS_ORIGINS` environment variable to a comma‑separated list
SEEDPASS_CORS_ORIGINS=http://localhost:3000 seedpass api start
```

Shut down the server with `seedpass api stop`.
Shut down the server with `seedpass api stop --token <token>`.
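A hedged sketch of what the token-authenticated shutdown amounts to over HTTP; the endpoint path and header shape match the `api stop` implementation that appears later in this diff, while the host and port are example defaults:

```python
import requests

token = "PASTE-THE-TOKEN-PRINTED-BY-api-start"  # printed once when `seedpass api start` runs
requests.post(
    "http://127.0.0.1:8000/api/v1/shutdown",
    headers={"Authorization": f"Bearer {token}"},
    timeout=2,
)
```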
---
|
@@ -120,6 +120,11 @@ isn't on your PATH. If these tools are unavailable you'll see a link to download
|
||||
the installer now attempts to download Python 3.12 automatically so you don't have to compile packages from source.
|
||||
|
||||
**Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.
|
||||
|
||||
#### Installer Dependency Checks
|
||||
|
||||
The installer verifies that core build tooling—C/C++ build tools, Rust, CMake, and the imaging/GTK libraries—are available before completing. Pass `--no-gui` to skip installing GUI packages. On Linux, ensure `xclip` or `wl-clipboard` is installed for clipboard support.
|
||||
|
||||
### Uninstall
|
||||
|
||||
Run the matching uninstaller if you need to remove a previous installation or clean up an old `seedpass` command:
|
||||
|
@@ -43,6 +43,7 @@ from seedpass.core.vault import Vault
from seedpass.core.config_manager import ConfigManager
from seedpass.core.backup import BackupManager
from seedpass.core.entry_management import EntryManager
from seedpass.core.state_manager import StateManager
from nostr.client import NostrClient
from utils.fingerprint import generate_fingerprint
from utils.fingerprint_manager import FingerprintManager

@@ -195,11 +196,13 @@ def main() -> None:

    encrypted = entry_mgr.vault.get_encrypted_index()
    if encrypted:
        idx = StateManager(dir_path).state.get("nostr_account_idx", 0)
        client = NostrClient(
            entry_mgr.vault.encryption_manager,
            fingerprint or dir_path.name,
            parent_seed=seed,
            config_manager=cfg_mgr,
            account_index=idx,
        )
        asyncio.run(client.publish_snapshot(encrypted))
        print("[+] Data synchronized to Nostr.")
@@ -5,7 +5,9 @@
|
||||
# Supports installing from a specific branch using the -b or --branch flag.
|
||||
# Example: ./install.sh -b beta
|
||||
|
||||
set -e
|
||||
set -euo pipefail
|
||||
IFS=$'\n\t'
|
||||
trap 'echo "[ERROR] Line $LINENO failed"; exit 1' ERR
|
||||
|
||||
# --- Configuration ---
|
||||
REPO_URL="https://github.com/PR0M3TH3AN/SeedPass.git"
|
||||
@@ -15,13 +17,14 @@ VENV_DIR="$INSTALL_DIR/venv"
|
||||
LAUNCHER_DIR="$HOME/.local/bin"
|
||||
LAUNCHER_PATH="$LAUNCHER_DIR/seedpass"
|
||||
BRANCH="main" # Default branch
|
||||
MODE="tui"
|
||||
INSTALL_GUI=false
|
||||
|
||||
# --- Helper Functions ---
|
||||
print_info() { echo -e "\033[1;34m[INFO]\033[0m $1"; }
|
||||
print_success() { echo -e "\033[1;32m[SUCCESS]\033[0m $1"; }
|
||||
print_warning() { echo -e "\033[1;33m[WARNING]\033[0m $1"; }
|
||||
print_error() { echo -e "\033[1;31m[ERROR]\033[0m $1" >&2; exit 1; }
|
||||
print_info() { echo -e "\033[1;34m[INFO]\033[0m" "$1"; }
|
||||
print_success() { echo -e "\033[1;32m[SUCCESS]\033[0m" "$1"; }
|
||||
print_warning() { echo -e "\033[1;33m[WARNING]\033[0m" "$1"; }
|
||||
print_error() { echo -e "\033[1;31m[ERROR]\033[0m" "$1" >&2; exit 1; }
|
||||
|
||||
# Install build dependencies for Gtk/GObject if available via the system package manager
|
||||
install_dependencies() {
|
||||
@@ -30,29 +33,36 @@ install_dependencies() {
|
||||
sudo apt-get update && sudo apt-get install -y \\
|
||||
build-essential pkg-config libcairo2 libcairo2-dev \\
|
||||
libgirepository1.0-dev gobject-introspection \\
|
||||
gir1.2-gtk-3.0 python3-dev libffi-dev libssl-dev
|
||||
gir1.2-gtk-3.0 libgtk-3-dev python3-dev libffi-dev libssl-dev \\
|
||||
cmake rustc cargo zlib1g-dev libjpeg-dev libpng-dev \\
|
||||
libfreetype6-dev xclip wl-clipboard
|
||||
elif command -v yum &>/dev/null; then
|
||||
sudo yum install -y @'Development Tools' cairo cairo-devel \\
|
||||
gobject-introspection-devel gtk3-devel python3-devel \\
|
||||
libffi-devel openssl-devel
|
||||
libffi-devel openssl-devel cmake rust cargo zlib-devel \\
|
||||
libjpeg-turbo-devel libpng-devel freetype-devel xclip \\
|
||||
wl-clipboard
|
||||
elif command -v dnf &>/dev/null; then
|
||||
sudo dnf groupinstall -y "Development Tools" && sudo dnf install -y \\
|
||||
cairo cairo-devel gobject-introspection-devel gtk3-devel \\
|
||||
python3-devel libffi-devel openssl-devel
|
||||
python3-devel libffi-devel openssl-devel cmake rust cargo \\
|
||||
zlib-devel libjpeg-turbo-devel libpng-devel freetype-devel \\
|
||||
xclip wl-clipboard
|
||||
elif command -v pacman &>/dev/null; then
|
||||
sudo pacman -Syu --noconfirm base-devel pkgconf cairo \\
|
||||
gobject-introspection gtk3 python
|
||||
sudo pacman -Syu --noconfirm base-devel pkgconf cmake rustup \\
|
||||
gtk3 gobject-introspection cairo libjpeg-turbo zlib \\
|
||||
libpng freetype xclip wl-clipboard && rustup default stable
|
||||
elif command -v brew &>/dev/null; then
|
||||
brew install pkg-config cairo gobject-introspection gtk+3
|
||||
brew install pkg-config cairo gobject-introspection gtk+3 cmake rustup-init && \\
|
||||
rustup-init -y
|
||||
else
|
||||
print_warning "Unsupported package manager. Please install Gtk/GObject dependencies manually."
|
||||
fi
|
||||
print_warning "Install 'xclip' manually to enable clipboard features in secret mode."
|
||||
}
|
||||
usage() {
|
||||
echo "Usage: $0 [-b | --branch <branch_name>] [--with-gui] [-h | --help]"
|
||||
echo "Usage: $0 [-b | --branch <branch_name>] [-m | --mode <tui|gui|both>] [-h | --help]"
|
||||
echo " -b, --branch Specify the git branch to install (default: main)"
|
||||
echo " --with-gui Include graphical interface dependencies"
|
||||
echo " -m, --mode Installation mode: tui, gui, both (default: tui)"
|
||||
echo " -h, --help Display this help message"
|
||||
exit 0
|
||||
}
|
||||
@@ -73,9 +83,13 @@ main() {
|
||||
-h|--help)
|
||||
usage
|
||||
;;
|
||||
--with-gui)
|
||||
INSTALL_GUI=true
|
||||
shift
|
||||
-m|--mode)
|
||||
if [ -n "$2" ]; then
|
||||
MODE="$2"
|
||||
shift 2
|
||||
else
|
||||
print_error "Error: --mode requires an argument (tui|gui|both)."
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
print_error "Unknown parameter passed: $1"; usage
|
||||
@@ -83,6 +97,26 @@ main() {
|
||||
esac
|
||||
done
|
||||
|
||||
case "$MODE" in
|
||||
tui|gui|both) ;;
|
||||
*)
|
||||
print_error "Invalid mode: $MODE. Use 'tui', 'gui', or 'both'."
|
||||
;;
|
||||
esac
|
||||
|
||||
DISPLAY_DETECTED=false
|
||||
if [ -n "${DISPLAY:-}" ] || [ -n "${WAYLAND_DISPLAY:-}" ]; then
|
||||
DISPLAY_DETECTED=true
|
||||
fi
|
||||
|
||||
if [[ "$MODE" == "gui" || "$MODE" == "both" ]]; then
|
||||
if [ "$DISPLAY_DETECTED" = true ]; then
|
||||
INSTALL_GUI=true
|
||||
else
|
||||
print_warning "No display detected. Skipping GUI installation."
|
||||
fi
|
||||
fi
|
||||
|
||||
# 1. Detect OS
|
||||
OS_NAME=$(uname -s)
|
||||
print_info "Installing SeedPass from branch: '$BRANCH'"
|
||||
@@ -179,6 +213,11 @@ main() {
|
||||
else
|
||||
pip install -e .
|
||||
fi
|
||||
|
||||
if ! "$VENV_DIR/bin/python" -c "import seedpass.cli; print('ok')"; then
|
||||
print_error "SeedPass CLI import check failed."
|
||||
fi
|
||||
|
||||
deactivate
|
||||
|
||||
# 7. Create launcher script
|
||||
|
@@ -6,10 +6,10 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
from .bip85 import BIP85
|
||||
except Exception as exc:
|
||||
logger.error("Failed to import BIP85 module: %s", exc, exc_info=True)
|
||||
raise ImportError(
|
||||
"BIP85 dependencies are missing. Install 'bip_utils', 'cryptography', and 'colorama'."
|
||||
) from exc
|
||||
|
||||
logger.info("BIP85 module imported successfully.")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to import BIP85 module: {e}", exc_info=True)
|
||||
BIP85 = None
|
||||
|
||||
__all__ = ["BIP85"] if BIP85 is not None else []
|
||||
__all__ = ["BIP85"]
|
||||
|
@@ -18,6 +18,8 @@ import hashlib
|
||||
import hmac
|
||||
import logging
|
||||
import os
|
||||
from typing import Union
|
||||
|
||||
from colorama import Fore
|
||||
|
||||
from bip_utils import Bip32Slip10Secp256k1, Bip39MnemonicGenerator, Bip39Languages
|
||||
@@ -37,13 +39,19 @@ class Bip85Error(Exception):
|
||||
|
||||
|
||||
class BIP85:
|
||||
def __init__(self, seed_bytes: bytes | str):
|
||||
"""Initialize from BIP39 seed bytes or BIP32 xprv string."""
|
||||
def __init__(self, seed_or_xprv: Union[bytes, str]):
|
||||
"""Initialize from seed bytes or an ``xprv`` string.
|
||||
|
||||
Parameters:
|
||||
seed_or_xprv (Union[bytes, str]): Either raw BIP39 seed bytes
|
||||
or a BIP32 extended private key (``xprv``) string.
|
||||
"""
|
||||
|
||||
try:
|
||||
if isinstance(seed_bytes, (bytes, bytearray)):
|
||||
self.bip32_ctx = Bip32Slip10Secp256k1.FromSeed(seed_bytes)
|
||||
if isinstance(seed_or_xprv, (bytes, bytearray)):
|
||||
self.bip32_ctx = Bip32Slip10Secp256k1.FromSeed(seed_or_xprv)
|
||||
else:
|
||||
self.bip32_ctx = Bip32Slip10Secp256k1.FromExtendedKey(seed_bytes)
|
||||
self.bip32_ctx = Bip32Slip10Secp256k1.FromExtendedKey(seed_or_xprv)
|
||||
logging.debug("BIP32 context initialized successfully.")
|
||||
except Exception as e:
|
||||
logging.error(f"Error initializing BIP32 context: {e}", exc_info=True)
|
||||
@@ -51,26 +59,34 @@ class BIP85:
|
||||
raise Bip85Error(f"Error initializing BIP32 context: {e}")
|
||||
|
||||
def derive_entropy(
|
||||
self, index: int, bytes_len: int, app_no: int = 39, words_len: int | None = None
|
||||
self,
|
||||
index: int,
|
||||
entropy_bytes: int,
|
||||
app_no: int = 39,
|
||||
word_count: int | None = None,
|
||||
) -> bytes:
|
||||
"""
|
||||
Derives entropy using BIP-85 HMAC-SHA512 method.
|
||||
"""Derive entropy using the BIP-85 HMAC-SHA512 method.
|
||||
|
||||
Parameters:
|
||||
index (int): Index for the child entropy.
|
||||
bytes_len (int): Number of bytes to derive for the entropy.
|
||||
app_no (int): Application number (default 39 for BIP39)
|
||||
entropy_bytes (int): Number of bytes of entropy to derive.
|
||||
app_no (int): Application number (default 39 for BIP39).
|
||||
word_count (int | None): Number of words used in the derivation path
|
||||
for BIP39. If ``None`` and ``app_no`` is ``39``, ``word_count``
|
||||
defaults to ``entropy_bytes``. The final segment of the
|
||||
derivation path becomes ``m/83696968'/39'/0'/word_count'/index'``.
|
||||
|
||||
Returns:
|
||||
bytes: Derived entropy.
|
||||
bytes: Derived entropy of length ``entropy_bytes``.
|
||||
|
||||
Raises:
|
||||
SystemExit: If derivation fails or entropy length is invalid.
|
||||
SystemExit: If derivation fails or the derived entropy length is
|
||||
invalid.
|
||||
"""
|
||||
if app_no == 39:
|
||||
if words_len is None:
|
||||
words_len = bytes_len
|
||||
path = f"m/83696968'/{app_no}'/0'/{words_len}'/{index}'"
|
||||
if word_count is None:
|
||||
word_count = entropy_bytes
|
||||
path = f"m/83696968'/{app_no}'/0'/{word_count}'/{index}'"
|
||||
elif app_no == 32:
|
||||
path = f"m/83696968'/{app_no}'/{index}'"
|
||||
else:
|
||||
@@ -86,17 +102,17 @@ class BIP85:
|
||||
hmac_result = hmac.new(hmac_key, k, hashlib.sha512).digest()
|
||||
logging.debug(f"HMAC-SHA512 result: {hmac_result.hex()}")
|
||||
|
||||
entropy = hmac_result[:bytes_len]
|
||||
entropy = hmac_result[:entropy_bytes]
|
||||
|
||||
if len(entropy) != bytes_len:
|
||||
if len(entropy) != entropy_bytes:
|
||||
logging.error(
|
||||
f"Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
|
||||
f"Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
|
||||
)
|
||||
print(
|
||||
f"{Fore.RED}Error: Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
|
||||
f"{Fore.RED}Error: Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
|
||||
)
|
||||
raise Bip85Error(
|
||||
f"Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
|
||||
f"Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
|
||||
)
|
||||
|
||||
logging.debug(f"Derived entropy: {entropy.hex()}")
|
||||
@@ -107,14 +123,17 @@ class BIP85:
|
||||
raise Bip85Error(f"Error deriving entropy: {e}")
|
||||
|
||||
def derive_mnemonic(self, index: int, words_num: int) -> str:
|
||||
bytes_len = {12: 16, 18: 24, 24: 32}.get(words_num)
|
||||
if not bytes_len:
|
||||
entropy_bytes = {12: 16, 18: 24, 24: 32}.get(words_num)
|
||||
if not entropy_bytes:
|
||||
logging.error(f"Unsupported number of words: {words_num}")
|
||||
print(f"{Fore.RED}Error: Unsupported number of words: {words_num}")
|
||||
raise Bip85Error(f"Unsupported number of words: {words_num}")
|
||||
|
||||
entropy = self.derive_entropy(
|
||||
index=index, bytes_len=bytes_len, app_no=39, words_len=words_num
|
||||
index=index,
|
||||
entropy_bytes=entropy_bytes,
|
||||
app_no=39,
|
||||
word_count=words_num,
|
||||
)
|
||||
try:
|
||||
mnemonic = Bip39MnemonicGenerator(Bip39Languages.ENGLISH).FromEntropy(
|
||||
@@ -130,7 +149,7 @@ class BIP85:
|
||||
def derive_symmetric_key(self, index: int = 0, app_no: int = 2) -> bytes:
|
||||
"""Derive 32 bytes of entropy for symmetric key usage."""
|
||||
try:
|
||||
key = self.derive_entropy(index=index, bytes_len=32, app_no=app_no)
|
||||
key = self.derive_entropy(index=index, entropy_bytes=32, app_no=app_no)
|
||||
logging.debug(f"Derived symmetric key: {key.hex()}")
|
||||
return key
|
||||
except Exception as e:
|
||||
|
112
src/main.py
112
src/main.py
@@ -19,7 +19,7 @@ from termcolor import colored
|
||||
from utils.color_scheme import color_text
|
||||
import importlib
|
||||
|
||||
from seedpass.core.manager import PasswordManager
|
||||
from seedpass.core.manager import PasswordManager, restore_backup_index
|
||||
from nostr.client import NostrClient
|
||||
from seedpass.core.entry_types import EntryType
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
@@ -670,33 +670,49 @@ def handle_set_inactivity_timeout(password_manager: PasswordManager) -> None:
|
||||
|
||||
|
||||
def handle_set_kdf_iterations(password_manager: PasswordManager) -> None:
|
||||
"""Change the PBKDF2 iteration count."""
|
||||
"""Interactive slider for PBKDF2 iteration strength with benchmarking."""
|
||||
import hashlib
|
||||
import time
|
||||
|
||||
cfg_mgr = password_manager.config_manager
|
||||
if cfg_mgr is None:
|
||||
print(colored("Configuration manager unavailable.", "red"))
|
||||
return
|
||||
levels = [
|
||||
("1", "Very Fast", 10_000),
|
||||
("2", "Fast", 50_000),
|
||||
("3", "Balanced", 100_000),
|
||||
("4", "Slow", 200_000),
|
||||
("5", "Paranoid", 500_000),
|
||||
]
|
||||
try:
|
||||
current = cfg_mgr.get_kdf_iterations()
|
||||
print(colored(f"Current iterations: {current}", "cyan"))
|
||||
except Exception as e:
|
||||
logging.error(f"Error loading iterations: {e}")
|
||||
print(colored(f"Error: {e}", "red"))
|
||||
return
|
||||
value = input("Enter new iteration count: ").strip()
|
||||
if not value:
|
||||
print(colored("No iteration count entered.", "yellow"))
|
||||
print(colored(f"Current iterations: {current}", "cyan"))
|
||||
for key, label, iters in levels:
|
||||
marker = "*" if iters == current else " "
|
||||
print(colored(f"{key}. {label} ({iters}) {marker}", "menu"))
|
||||
print(colored("b. Benchmark current setting", "menu"))
|
||||
choice = input("Select strength or 'b' to benchmark: ").strip().lower()
|
||||
if not choice:
|
||||
print(colored("No change made.", "yellow"))
|
||||
return
|
||||
if choice == "b":
|
||||
start = time.perf_counter()
|
||||
hashlib.pbkdf2_hmac("sha256", b"bench", b"salt", current)
|
||||
elapsed = time.perf_counter() - start
|
||||
print(colored(f"{current} iterations took {elapsed:.2f}s", "green"))
|
||||
return
|
||||
selected = {k: v for k, _, v in levels}.get(choice)
|
||||
if not selected:
|
||||
print(colored("Invalid choice.", "red"))
|
||||
return
|
||||
try:
|
||||
iterations = int(value)
|
||||
if iterations <= 0:
|
||||
print(colored("Iterations must be positive.", "red"))
|
||||
return
|
||||
except ValueError:
|
||||
print(colored("Invalid number.", "red"))
|
||||
return
|
||||
try:
|
||||
cfg_mgr.set_kdf_iterations(iterations)
|
||||
print(colored("KDF iteration count updated.", "green"))
|
||||
cfg_mgr.set_kdf_iterations(selected)
|
||||
print(colored(f"KDF iteration count set to {selected}.", "green"))
|
||||
except Exception as e:
|
||||
logging.error(f"Error saving iterations: {e}")
|
||||
print(colored(f"Error: {e}", "red"))
|
||||
@@ -1014,12 +1030,12 @@ def handle_settings(password_manager: PasswordManager) -> None:
|
||||
print(color_text("8. Import database", "menu"))
|
||||
print(color_text("9. Export 2FA codes", "menu"))
|
||||
print(color_text("10. Set additional backup location", "menu"))
|
||||
print(color_text("11. Set KDF iterations", "menu"))
|
||||
print(color_text("11. KDF strength & benchmark", "menu"))
|
||||
print(color_text("12. Set inactivity timeout", "menu"))
|
||||
print(color_text("13. Lock Vault", "menu"))
|
||||
print(color_text("14. Stats", "menu"))
|
||||
print(color_text("15. Toggle Secret Mode", "menu"))
|
||||
print(color_text("16. Toggle Offline Mode", "menu"))
|
||||
print(color_text("16. Toggle Offline Mode (default ON)", "menu"))
|
||||
print(color_text("17. Toggle Quick Unlock", "menu"))
|
||||
choice = input("Select an option or press Enter to go back: ").strip()
|
||||
if choice == "1":
|
||||
@@ -1285,11 +1301,20 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
|
||||
load_global_config()
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--fingerprint")
|
||||
parser.add_argument(
|
||||
"--restore-backup",
|
||||
help="Restore index from backup file before starting",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-clipboard",
|
||||
action="store_true",
|
||||
help="Disable clipboard support and print secrets",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--deterministic-totp",
|
||||
action="store_true",
|
||||
help="Derive TOTP secrets deterministically",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--max-prompt-attempts",
|
||||
type=int,
|
||||
@@ -1300,6 +1325,11 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
|
||||
|
||||
exp = sub.add_parser("export")
|
||||
exp.add_argument("--file")
|
||||
exp.add_argument(
|
||||
"--unencrypted",
|
||||
action="store_true",
|
||||
help="Export without encryption",
|
||||
)
|
||||
|
||||
imp = sub.add_parser("import")
|
||||
imp.add_argument("--file")
|
||||
@@ -1315,6 +1345,41 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
|
||||
|
||||
args = parser.parse_args(argv)
|
||||
|
||||
if args.restore_backup:
|
||||
fp_target = args.fingerprint or fingerprint
|
||||
if fp_target is None:
|
||||
print(
|
||||
colored(
|
||||
"Error: --fingerprint is required when using --restore-backup.",
|
||||
"red",
|
||||
)
|
||||
)
|
||||
return 1
|
||||
try:
|
||||
restore_backup_index(Path(args.restore_backup), fp_target)
|
||||
logger.info("Restored backup from %s", args.restore_backup)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to restore backup: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to restore backup: {e}", "red"))
|
||||
return 1
|
||||
elif args.command is None:
|
||||
print("Startup Options:")
|
||||
print("1. Continue")
|
||||
print("2. Restore from backup")
|
||||
choice = input("Select an option: ").strip()
|
||||
if choice == "2":
|
||||
path = input("Enter backup file path: ").strip()
|
||||
fp_target = args.fingerprint or fingerprint
|
||||
if fp_target is None:
|
||||
fp_target = input("Enter fingerprint for restore: ").strip()
|
||||
try:
|
||||
restore_backup_index(Path(path), fp_target)
|
||||
logger.info("Restored backup from %s", path)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to restore backup: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to restore backup: {e}", "red"))
|
||||
return 1
|
||||
|
||||
if args.max_prompt_attempts is not None:
|
||||
os.environ["SEEDPASS_MAX_PROMPT_ATTEMPTS"] = str(args.max_prompt_attempts)
|
||||
|
||||
@@ -1332,9 +1397,13 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
|
||||
|
||||
if args.no_clipboard:
|
||||
password_manager.secret_mode_enabled = False
|
||||
if args.deterministic_totp:
|
||||
password_manager.deterministic_totp = True
|
||||
|
||||
if args.command == "export":
|
||||
password_manager.handle_export_database(Path(args.file))
|
||||
password_manager.handle_export_database(
|
||||
Path(args.file), encrypt=not args.unencrypted
|
||||
)
|
||||
return 0
|
||||
if args.command == "import":
|
||||
password_manager.handle_import_database(Path(args.file))
|
||||
@@ -1376,9 +1445,10 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
|
||||
if entry.get("type") != EntryType.TOTP.value:
|
||||
print(colored("Entry is not a TOTP entry.", "red"))
|
||||
return 1
|
||||
code = password_manager.entry_manager.get_totp_code(
|
||||
idx, password_manager.parent_seed
|
||||
key = getattr(password_manager, "KEY_TOTP_DET", None) or getattr(
|
||||
password_manager, "parent_seed", None
|
||||
)
|
||||
code = password_manager.entry_manager.get_totp_code(idx, key)
|
||||
print(code)
|
||||
try:
|
||||
if copy_to_clipboard(code, password_manager.clipboard_clear_delay):
|
||||
|
@@ -25,3 +25,4 @@ class Manifest:
    algo: str
    chunks: List[ChunkMeta]
    delta_since: Optional[int] = None
    nonce: Optional[str] = None
@@ -33,7 +33,7 @@ from .backup_models import (
|
||||
)
|
||||
from .connection import ConnectionHandler, DEFAULT_RELAYS
|
||||
from .key_manager import KeyManager as SeedPassKeyManager
|
||||
from .snapshot import MANIFEST_ID_PREFIX, SnapshotHandler, prepare_snapshot
|
||||
from .snapshot import SnapshotHandler, prepare_snapshot
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover - imported for type hints
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
@@ -57,6 +57,8 @@ class NostrClient(ConnectionHandler, SnapshotHandler):
|
||||
parent_seed: Optional[str] = None,
|
||||
offline_mode: bool = False,
|
||||
config_manager: Optional["ConfigManager"] = None,
|
||||
key_index: bytes | None = None,
|
||||
account_index: int | None = None,
|
||||
) -> None:
|
||||
self.encryption_manager = encryption_manager
|
||||
self.fingerprint = fingerprint
|
||||
@@ -68,7 +70,7 @@ class NostrClient(ConnectionHandler, SnapshotHandler):
|
||||
parent_seed = self.encryption_manager.decrypt_parent_seed()
|
||||
|
||||
# Use our project's KeyManager to derive the private key
|
||||
self.key_manager = KeyManager(parent_seed, fingerprint)
|
||||
self.key_manager = KeyManager(parent_seed, fingerprint, account_index)
|
||||
|
||||
# Create a nostr-sdk Keys object from our derived private key
|
||||
private_key_hex = self.key_manager.keys.private_key_hex()
|
||||
@@ -99,6 +101,7 @@ class NostrClient(ConnectionHandler, SnapshotHandler):
|
||||
self.current_manifest: Manifest | None = None
|
||||
self.current_manifest_id: str | None = None
|
||||
self._delta_events: list[str] = []
|
||||
self.key_index = key_index or b""
|
||||
|
||||
# Configure and initialize the nostr-sdk Client
|
||||
signer = NostrSigner.keys(self.keys)
|
||||
@@ -111,5 +114,4 @@ __all__ = [
|
||||
"NostrClient",
|
||||
"prepare_snapshot",
|
||||
"DEFAULT_RELAYS",
|
||||
"MANIFEST_ID_PREFIX",
|
||||
]
|
||||
|
@@ -16,17 +16,22 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class KeyManager:
|
||||
"""
|
||||
Manages key generation, encoding, and derivation for NostrClient.
|
||||
"""
|
||||
"""Manages key generation, encoding, and derivation for ``NostrClient``."""
|
||||
|
||||
def __init__(self, parent_seed: str, fingerprint: str):
|
||||
"""
|
||||
Initializes the KeyManager with the provided parent_seed and fingerprint.
|
||||
def __init__(
|
||||
self, parent_seed: str, fingerprint: str, account_index: int | None = None
|
||||
):
|
||||
"""Initialize the key manager.
|
||||
|
||||
Parameters:
|
||||
parent_seed (str): The parent seed used for key derivation.
|
||||
fingerprint (str): The fingerprint to differentiate key derivations.
|
||||
Parameters
|
||||
----------
|
||||
parent_seed:
|
||||
The BIP-39 seed used as the root for derivations.
|
||||
fingerprint:
|
||||
Seed profile fingerprint used for legacy derivations and logging.
|
||||
account_index:
|
||||
Optional explicit index for BIP-85 Nostr key derivation. When ``None``
|
||||
the index defaults to ``0``.
|
||||
"""
|
||||
try:
|
||||
if not isinstance(parent_seed, str):
|
||||
@@ -40,12 +45,15 @@ class KeyManager:
|
||||
|
||||
self.parent_seed = parent_seed
|
||||
self.fingerprint = fingerprint
|
||||
logger.debug(f"KeyManager initialized with parent_seed and fingerprint.")
|
||||
self.account_index = account_index
|
||||
logger.debug(
|
||||
"KeyManager initialized with parent_seed, fingerprint and account index."
|
||||
)
|
||||
|
||||
# Initialize BIP85
|
||||
self.bip85 = self.initialize_bip85()
|
||||
|
||||
# Generate Nostr keys using the fingerprint
|
||||
# Generate Nostr keys using the provided account index
|
||||
self.keys = self.generate_nostr_keys()
|
||||
logger.debug("Nostr Keys initialized successfully.")
|
||||
|
||||
@@ -70,39 +78,41 @@ class KeyManager:
|
||||
raise
|
||||
|
||||
def generate_nostr_keys(self) -> Keys:
|
||||
"""
|
||||
Derives a unique Nostr key pair for the given fingerprint using BIP-85.
|
||||
|
||||
Returns:
|
||||
Keys: An instance of Keys containing the Nostr key pair.
|
||||
"""
|
||||
"""Derive a Nostr key pair using the configured ``account_index``."""
|
||||
try:
|
||||
# Convert fingerprint to an integer index (using a hash function)
|
||||
index = int(hashlib.sha256(self.fingerprint.encode()).hexdigest(), 16) % (
|
||||
2**31
|
||||
)
|
||||
index = self.account_index if self.account_index is not None else 0
|
||||
|
||||
# Derive entropy for Nostr key (32 bytes)
|
||||
entropy_bytes = self.bip85.derive_entropy(
|
||||
index=index,
|
||||
bytes_len=32,
|
||||
app_no=NOSTR_KEY_APP_ID,
|
||||
index=index, entropy_bytes=32, app_no=NOSTR_KEY_APP_ID
|
||||
)
|
||||
|
||||
# Generate Nostr key pair from entropy
|
||||
private_key_hex = entropy_bytes.hex()
|
||||
keys = Keys(priv_k=private_key_hex)
|
||||
logger.debug(f"Nostr keys generated for fingerprint {self.fingerprint}.")
|
||||
logger.debug("Nostr keys generated for account index %s", index)
|
||||
return keys
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to generate Nostr keys: {e}", exc_info=True)
|
||||
raise
|
||||
|
||||
def generate_v1_nostr_keys(self) -> Keys:
|
||||
"""Derive keys using the legacy fingerprint-hash method."""
|
||||
try:
|
||||
index = int(hashlib.sha256(self.fingerprint.encode()).hexdigest(), 16) % (
|
||||
2**31
|
||||
)
|
||||
entropy_bytes = self.bip85.derive_entropy(
|
||||
index=index, entropy_bytes=32, app_no=NOSTR_KEY_APP_ID
|
||||
)
|
||||
return Keys(priv_k=entropy_bytes.hex())
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to generate v1 Nostr keys: {e}", exc_info=True)
|
||||
raise
|
||||
|
||||
def generate_legacy_nostr_keys(self) -> Keys:
|
||||
"""Derive Nostr keys using the legacy application ID."""
|
||||
try:
|
||||
entropy = self.bip85.derive_entropy(
|
||||
index=0, bytes_len=32, app_no=LEGACY_NOSTR_KEY_APP_ID
|
||||
index=0, entropy_bytes=32, app_no=LEGACY_NOSTR_KEY_APP_ID
|
||||
)
|
||||
return Keys(priv_k=entropy.hex())
|
||||
except Exception as e:
|
||||
|
@@ -1,41 +0,0 @@
|
||||
# nostr/logging_config.py
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
# Comment out or remove the configure_logging function to avoid conflicts
|
||||
# def configure_logging():
|
||||
# """
|
||||
# Configures logging with both file and console handlers.
|
||||
# Logs include the timestamp, log level, message, filename, and line number.
|
||||
# Only ERROR and higher-level messages are shown in the terminal, while all messages
|
||||
# are logged in the log file.
|
||||
# """
|
||||
# logger = logging.getLogger()
|
||||
# logger.setLevel(logging.DEBUG) # Set root logger to DEBUG
|
||||
#
|
||||
# # Prevent adding multiple handlers if configure_logging is called multiple times
|
||||
# if not logger.handlers:
|
||||
# # Create the 'logs' folder if it doesn't exist
|
||||
# log_directory = 'logs'
|
||||
# if not os.path.exists(log_directory):
|
||||
# os.makedirs(log_directory)
|
||||
#
|
||||
# # Create handlers
|
||||
# c_handler = logging.StreamHandler()
|
||||
# f_handler = logging.FileHandler(os.path.join(log_directory, 'app.log'))
|
||||
#
|
||||
# # Set levels: only errors and critical messages will be shown in the console
|
||||
# c_handler.setLevel(logging.ERROR)
|
||||
# f_handler.setLevel(logging.DEBUG)
|
||||
#
|
||||
# # Create formatters and add them to handlers, include file and line number in log messages
|
||||
# formatter = logging.Formatter(
|
||||
# '%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]'
|
||||
# )
|
||||
# c_handler.setFormatter(formatter)
|
||||
# f_handler.setFormatter(formatter)
|
||||
#
|
||||
# # Add handlers to the logger
|
||||
# logger.addHandler(c_handler)
|
||||
# logger.addHandler(f_handler)
|
@@ -2,8 +2,10 @@ import asyncio
|
||||
import base64
|
||||
import gzip
|
||||
import hashlib
|
||||
import hmac
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from datetime import timedelta
|
||||
from typing import Tuple
|
||||
@@ -23,9 +25,6 @@ from .backup_models import (
|
||||
logger = logging.getLogger("nostr.client")
|
||||
logger.setLevel(logging.WARNING)
|
||||
|
||||
# Identifier prefix for replaceable manifest events
|
||||
MANIFEST_ID_PREFIX = "seedpass-manifest-"
|
||||
|
||||
|
||||
def prepare_snapshot(
|
||||
encrypted_bytes: bytes, limit: int
|
||||
@@ -47,6 +46,19 @@ def prepare_snapshot(
|
||||
return manifest, chunks
|
||||
|
||||
|
||||
def new_manifest_id(key_index: bytes) -> tuple[str, bytes]:
|
||||
"""Return a new manifest identifier and nonce.
|
||||
|
||||
The identifier is computed as HMAC-SHA256 of ``b"manifest|" + nonce``
|
||||
using ``key_index`` as the HMAC key. The nonce is returned so it can be
|
||||
embedded inside the manifest itself.
|
||||
"""
|
||||
|
||||
nonce = os.urandom(16)
|
||||
digest = hmac.new(key_index, b"manifest|" + nonce, hashlib.sha256).hexdigest()
|
||||
return digest, nonce
|
||||
|
||||
|
||||
class SnapshotHandler:
|
||||
"""Mixin providing chunk and manifest handling."""
|
||||
|
||||
@@ -84,34 +96,43 @@ class SnapshotHandler:
|
||||
except Exception:
|
||||
meta.event_id = None
|
||||
|
||||
if (
|
||||
self.current_manifest_id
|
||||
and self.current_manifest
|
||||
and getattr(self.current_manifest, "nonce", None)
|
||||
):
|
||||
manifest_id = self.current_manifest_id
|
||||
manifest.nonce = self.current_manifest.nonce
|
||||
else:
|
||||
manifest_id, nonce = new_manifest_id(self.key_index)
|
||||
manifest.nonce = base64.b64encode(nonce).decode("utf-8")
|
||||
|
||||
manifest_json = json.dumps(
|
||||
{
|
||||
"ver": manifest.ver,
|
||||
"algo": manifest.algo,
|
||||
"chunks": [meta.__dict__ for meta in manifest.chunks],
|
||||
"delta_since": manifest.delta_since,
|
||||
"nonce": manifest.nonce,
|
||||
}
|
||||
)
|
||||
|
||||
manifest_identifier = (
|
||||
self.current_manifest_id or f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
|
||||
)
|
||||
manifest_event = (
|
||||
nostr_client.EventBuilder(nostr_client.Kind(KIND_MANIFEST), manifest_json)
|
||||
.tags([nostr_client.Tag.identifier(manifest_identifier)])
|
||||
.tags([nostr_client.Tag.identifier(manifest_id)])
|
||||
.build(self.keys.public_key())
|
||||
.sign_with_keys(self.keys)
|
||||
)
|
||||
await self.client.send_event(manifest_event)
|
||||
with self._state_lock:
|
||||
self.current_manifest = manifest
|
||||
self.current_manifest_id = manifest_identifier
|
||||
self.current_manifest_id = manifest_id
|
||||
self.current_manifest.delta_since = int(time.time())
|
||||
self._delta_events = []
|
||||
if getattr(self, "verbose_timing", False):
|
||||
duration = time.perf_counter() - start
|
||||
logger.info("publish_snapshot completed in %.2f seconds", duration)
|
||||
return manifest, manifest_identifier
|
||||
return manifest, manifest_id
|
||||
|
||||
async def _fetch_chunks_with_retry(
|
||||
self, manifest_event
|
||||
@@ -129,6 +150,7 @@ class SnapshotHandler:
|
||||
if data.get("delta_since") is not None
|
||||
else None
|
||||
),
|
||||
nonce=data.get("nonce"),
|
||||
)
|
||||
except Exception:
|
||||
return None
|
||||
@@ -204,14 +226,11 @@ class SnapshotHandler:
|
||||
pubkey = self.keys.public_key()
|
||||
timeout = timedelta(seconds=10)
|
||||
|
||||
ident = f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
|
||||
f = (
|
||||
nostr_client.Filter()
|
||||
.author(pubkey)
|
||||
.kind(nostr_client.Kind(KIND_MANIFEST))
|
||||
.identifier(ident)
|
||||
.limit(1)
|
||||
)
|
||||
ident = self.current_manifest_id
|
||||
f = nostr_client.Filter().author(pubkey).kind(nostr_client.Kind(KIND_MANIFEST))
|
||||
if ident:
|
||||
f = f.identifier(ident)
|
||||
f = f.limit(1)
|
||||
try:
|
||||
events = (await self.client.fetch_events(f, timeout)).to_vec()
|
||||
except Exception as e: # pragma: no cover - network errors
|
||||
@@ -223,13 +242,11 @@ class SnapshotHandler:
|
||||
)
|
||||
return None
|
||||
|
||||
if not events:
|
||||
ident = MANIFEST_ID_PREFIX.rstrip("-")
|
||||
if not events and ident:
|
||||
f = (
|
||||
nostr_client.Filter()
|
||||
.author(pubkey)
|
||||
.kind(nostr_client.Kind(KIND_MANIFEST))
|
||||
.identifier(ident)
|
||||
.limit(1)
|
||||
)
|
||||
try:
|
||||
@@ -245,8 +262,6 @@ class SnapshotHandler:
|
||||
if not events:
|
||||
return None
|
||||
|
||||
logger.info("Fetched manifest using identifier %s", ident)
|
||||
|
||||
for manifest_event in events:
|
||||
try:
|
||||
result = await self._fetch_chunks_with_retry(manifest_event)
|
||||
@@ -300,7 +315,9 @@ class SnapshotHandler:
|
||||
return
|
||||
await self._connect_async()
|
||||
pubkey = self.keys.public_key()
|
||||
ident = self.current_manifest_id or f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
|
||||
ident = self.current_manifest_id
|
||||
if ident is None:
|
||||
return
|
||||
f = (
|
||||
nostr_client.Filter()
|
||||
.author(pubkey)
|
||||
@@ -358,6 +375,7 @@ class SnapshotHandler:
|
||||
meta.__dict__ for meta in self.current_manifest.chunks
|
||||
],
|
||||
"delta_since": self.current_manifest.delta_since,
|
||||
"nonce": self.current_manifest.nonce,
|
||||
}
|
||||
)
|
||||
manifest_event = (
|
||||
|
@@ -1,9 +0,0 @@
"""Placeholder utilities for Nostr.

This module is intentionally left minimal and will be expanded in future
releases as the Nostr integration grows.
"""

# The module currently provides no functionality.
# `pass` denotes the intentional absence of implementation.
pass
@@ -28,7 +28,6 @@ Generated on: 2025-04-06
|
||||
├── encryption_manager.py
|
||||
├── event_handler.py
|
||||
├── key_manager.py
|
||||
├── logging_config.py
|
||||
├── utils.py
|
||||
├── utils/
|
||||
├── __init__.py
|
||||
@@ -3082,52 +3081,6 @@ __all__ = ['NostrClient']
|
||||
|
||||
```
|
||||
|
||||
## nostr/logging_config.py
|
||||
```python
|
||||
# nostr/logging_config.py
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
# Comment out or remove the configure_logging function to avoid conflicts
|
||||
# def configure_logging():
|
||||
# """
|
||||
# Configures logging with both file and console handlers.
|
||||
# Logs include the timestamp, log level, message, filename, and line number.
|
||||
# Only ERROR and higher-level messages are shown in the terminal, while all messages
|
||||
# are logged in the log file.
|
||||
# """
|
||||
# logger = logging.getLogger()
|
||||
# logger.setLevel(logging.DEBUG) # Set root logger to DEBUG
|
||||
#
|
||||
# # Prevent adding multiple handlers if configure_logging is called multiple times
|
||||
# if not logger.handlers:
|
||||
# # Create the 'logs' folder if it doesn't exist
|
||||
# log_directory = 'logs'
|
||||
# if not os.path.exists(log_directory):
|
||||
# os.makedirs(log_directory)
|
||||
#
|
||||
# # Create handlers
|
||||
# c_handler = logging.StreamHandler()
|
||||
# f_handler = logging.FileHandler(os.path.join(log_directory, 'app.log'))
|
||||
#
|
||||
# # Set levels: only errors and critical messages will be shown in the console
|
||||
# c_handler.setLevel(logging.ERROR)
|
||||
# f_handler.setLevel(logging.DEBUG)
|
||||
#
|
||||
# # Create formatters and add them to handlers, include file and line number in log messages
|
||||
# formatter = logging.Formatter(
|
||||
# '%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]'
|
||||
# )
|
||||
# c_handler.setFormatter(formatter)
|
||||
# f_handler.setFormatter(formatter)
|
||||
#
|
||||
# # Add handlers to the logger
|
||||
# logger.addHandler(c_handler)
|
||||
# logger.addHandler(f_handler)
|
||||
|
||||
```
|
||||
|
||||
## nostr/event_handler.py
|
||||
```python
|
||||
# nostr/event_handler.py
|
||||
|
@@ -9,8 +9,6 @@ import secrets
|
||||
import queue
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import jwt
|
||||
import logging
|
||||
|
||||
from fastapi import FastAPI, Header, HTTPException, Request, Response
|
||||
@@ -18,8 +16,8 @@ from fastapi.concurrency import run_in_threadpool
|
||||
import asyncio
|
||||
import sys
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
import hashlib
|
||||
import hmac
|
||||
|
||||
import bcrypt
|
||||
|
||||
from slowapi import Limiter, _rate_limit_exceeded_handler
|
||||
from slowapi.errors import RateLimitExceeded
|
||||
@@ -50,16 +48,9 @@ def _get_pm(request: Request) -> PasswordManager:
|
||||
def _check_token(request: Request, auth: str | None) -> None:
|
||||
if auth is None or not auth.startswith("Bearer "):
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
token = auth.split(" ", 1)[1]
|
||||
jwt_secret = getattr(request.app.state, "jwt_secret", "")
|
||||
token_hash = getattr(request.app.state, "token_hash", "")
|
||||
try:
|
||||
jwt.decode(token, jwt_secret, algorithms=["HS256"])
|
||||
except jwt.ExpiredSignatureError:
|
||||
raise HTTPException(status_code=401, detail="Token expired")
|
||||
except jwt.InvalidTokenError:
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
if not hmac.compare_digest(hashlib.sha256(token.encode()).hexdigest(), token_hash):
|
||||
token = auth.split(" ", 1)[1].encode()
|
||||
token_hash = getattr(request.app.state, "token_hash", b"")
|
||||
if not token_hash or not bcrypt.checkpw(token, token_hash):
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
|
||||
|
||||
@@ -78,7 +69,7 @@ def _reload_relays(request: Request, relays: list[str]) -> None:
|
||||
|
||||
|
||||
def start_server(fingerprint: str | None = None) -> str:
|
||||
"""Initialize global state and return a short-lived JWT token.
|
||||
"""Initialize global state and return a random API token.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
@@ -90,10 +81,8 @@ def start_server(fingerprint: str | None = None) -> str:
|
||||
else:
|
||||
pm = PasswordManager(fingerprint=fingerprint)
|
||||
app.state.pm = pm
|
||||
app.state.jwt_secret = secrets.token_urlsafe(32)
|
||||
payload = {"exp": datetime.now(timezone.utc) + timedelta(minutes=5)}
|
||||
raw_token = jwt.encode(payload, app.state.jwt_secret, algorithm="HS256")
|
||||
app.state.token_hash = hashlib.sha256(raw_token.encode()).hexdigest()
|
||||
raw_token = secrets.token_urlsafe(32)
|
||||
app.state.token_hash = bcrypt.hashpw(raw_token.encode(), bcrypt.gensalt())
|
||||
if not getattr(app.state, "limiter", None):
|
||||
app.state.limiter = limiter
|
||||
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
|
||||
@@ -214,13 +203,14 @@ async def create_entry(
|
||||
uri = await run_in_threadpool(
|
||||
pm.entry_manager.add_totp,
|
||||
entry.get("label"),
|
||||
pm.parent_seed,
|
||||
pm.KEY_TOTP_DET if entry.get("deterministic", False) else None,
|
||||
secret=entry.get("secret"),
|
||||
index=entry.get("index"),
|
||||
period=int(entry.get("period", 30)),
|
||||
digits=int(entry.get("digits", 6)),
|
||||
notes=entry.get("notes", ""),
|
||||
archived=entry.get("archived", False),
|
||||
deterministic=entry.get("deterministic", False),
|
||||
)
|
||||
return {"id": index, "uri": uri}
|
||||
|
||||
@@ -464,7 +454,8 @@ def export_totp(
|
||||
_check_token(request, authorization)
|
||||
_require_password(request, password)
|
||||
pm = _get_pm(request)
|
||||
return pm.entry_manager.export_totp_entries(pm.parent_seed)
|
||||
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
|
||||
return pm.entry_manager.export_totp_entries(key)
|
||||
|
||||
|
||||
@app.get("/api/v1/totp")
|
||||
@@ -478,11 +469,12 @@ def get_totp_codes(
|
||||
_require_password(request, password)
|
||||
pm = _get_pm(request)
|
||||
entries = pm.entry_manager.list_entries(
|
||||
filter_kind=EntryType.TOTP.value, include_archived=False
|
||||
filter_kinds=[EntryType.TOTP.value], include_archived=False
|
||||
)
|
||||
codes = []
|
||||
for idx, label, _u, _url, _arch in entries:
|
||||
code = pm.entry_manager.get_totp_code(idx, pm.parent_seed)
|
||||
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
|
||||
code = pm.entry_manager.get_totp_code(idx, key)
|
||||
|
||||
rem = pm.entry_manager.get_totp_time_remaining(idx)
|
||||
|
||||
|
@@ -30,6 +30,13 @@ no_clipboard_option = typer.Option(
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
deterministic_totp_option = typer.Option(
|
||||
False,
|
||||
"--deterministic-totp",
|
||||
help="Derive TOTP secrets deterministically",
|
||||
is_flag=True,
|
||||
)
|
||||
|
||||
# Sub command groups
|
||||
from . import entry, vault, nostr, config, fingerprint, util, api
|
||||
|
||||
@@ -55,12 +62,17 @@ def main(
|
||||
ctx: typer.Context,
|
||||
fingerprint: Optional[str] = fingerprint_option,
|
||||
no_clipboard: bool = no_clipboard_option,
|
||||
deterministic_totp: bool = deterministic_totp_option,
|
||||
) -> None:
|
||||
"""SeedPass CLI entry point.
|
||||
|
||||
When called without a subcommand this launches the interactive TUI.
|
||||
"""
|
||||
ctx.obj = {"fingerprint": fingerprint, "no_clipboard": no_clipboard}
|
||||
ctx.obj = {
|
||||
"fingerprint": fingerprint,
|
||||
"no_clipboard": no_clipboard,
|
||||
"deterministic_totp": deterministic_totp,
|
||||
}
|
||||
if ctx.invoked_subcommand is None:
|
||||
tui = importlib.import_module("main")
|
||||
raise typer.Exit(tui.main(fingerprint=fingerprint))
|
||||
|
@@ -13,19 +13,25 @@ app = typer.Typer(help="Run the API server")
|
||||
def api_start(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
|
||||
"""Start the SeedPass API server."""
|
||||
token = api_module.start_server(ctx.obj.get("fingerprint"))
|
||||
typer.echo(f"API token: {token}")
|
||||
typer.echo(
|
||||
f"API token: {token}\nWARNING: Store this token securely; it cannot be recovered."
|
||||
)
|
||||
uvicorn.run(api_module.app, host=host, port=port)
|
||||
|
||||
|
||||
@app.command("stop")
|
||||
def api_stop(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
|
||||
def api_stop(
|
||||
token: str = typer.Option(..., help="API token"),
|
||||
host: str = "127.0.0.1",
|
||||
port: int = 8000,
|
||||
) -> None:
|
||||
"""Stop the SeedPass API server."""
|
||||
import requests
|
||||
|
||||
try:
|
||||
requests.post(
|
||||
f"http://{host}:{port}/api/v1/shutdown",
|
||||
headers={"Authorization": f"Bearer {api_module.app.state.token_hash}"},
|
||||
headers={"Authorization": f"Bearer {token}"},
|
||||
timeout=2,
|
||||
)
|
||||
except Exception as exc: # pragma: no cover - best effort
|
||||
|
@@ -29,6 +29,8 @@ def _get_pm(ctx: typer.Context) -> PasswordManager:
|
||||
pm = PasswordManager(fingerprint=fp)
|
||||
if ctx.obj.get("no_clipboard"):
|
||||
pm.secret_mode_enabled = False
|
||||
if ctx.obj.get("deterministic_totp"):
|
||||
pm.deterministic_totp = True
|
||||
return pm
@@ -6,8 +6,10 @@ from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
import typer
|
||||
import click
|
||||
|
||||
from .common import _get_entry_service, EntryType
|
||||
from seedpass.core.entry_types import ALL_ENTRY_TYPES
|
||||
from utils.clipboard import ClipboardUnavailableError
|
||||
|
||||
|
||||
@@ -20,13 +22,20 @@ def entry_list(
|
||||
sort: str = typer.Option(
|
||||
"index", "--sort", help="Sort by 'index', 'label', or 'updated'"
|
||||
),
|
||||
kind: Optional[str] = typer.Option(None, "--kind", help="Filter by entry type"),
|
||||
kind: Optional[str] = typer.Option(
|
||||
None,
|
||||
"--kind",
|
||||
help="Filter by entry type",
|
||||
click_type=click.Choice(ALL_ENTRY_TYPES),
|
||||
),
|
||||
archived: bool = typer.Option(False, "--archived", help="Include archived"),
|
||||
) -> None:
|
||||
"""List entries in the vault."""
|
||||
service = _get_entry_service(ctx)
|
||||
entries = service.list_entries(
|
||||
sort_by=sort, filter_kind=kind, include_archived=archived
|
||||
sort_by=sort,
|
||||
filter_kinds=[kind] if kind else None,
|
||||
include_archived=archived,
|
||||
)
|
||||
for idx, label, username, url, is_archived in entries:
|
||||
line = f"{idx}: {label}"
|
||||
@@ -43,16 +52,17 @@ def entry_list(
|
||||
def entry_search(
|
||||
ctx: typer.Context,
|
||||
query: str,
|
||||
kind: List[str] = typer.Option(
|
||||
kinds: List[str] = typer.Option(
|
||||
None,
|
||||
"--kind",
|
||||
"-k",
|
||||
help="Filter by entry kinds (can be repeated)",
|
||||
click_type=click.Choice(ALL_ENTRY_TYPES),
|
||||
),
|
||||
) -> None:
|
||||
"""Search entries."""
|
||||
service = _get_entry_service(ctx)
|
||||
kinds = list(kind) if kind else None
|
||||
kinds = list(kinds) if kinds else None
|
||||
results = service.search_entries(query, kinds=kinds)
|
||||
if not results:
|
||||
typer.echo("No matching entries found")
|
||||
@@ -167,6 +177,9 @@ def entry_add_totp(
|
||||
secret: Optional[str] = typer.Option(None, "--secret", help="Import secret"),
|
||||
period: int = typer.Option(30, "--period", help="TOTP period in seconds"),
|
||||
digits: int = typer.Option(6, "--digits", help="Number of TOTP digits"),
|
||||
deterministic_totp: bool = typer.Option(
|
||||
False, "--deterministic-totp", help="Derive secret deterministically"
|
||||
),
|
||||
) -> None:
|
||||
"""Add a TOTP entry and output the otpauth URI."""
|
||||
service = _get_entry_service(ctx)
|
||||
@@ -176,6 +189,7 @@ def entry_add_totp(
|
||||
secret=secret,
|
||||
period=period,
|
||||
digits=digits,
|
||||
deterministic=deterministic_totp,
|
||||
)
|
||||
typer.echo(uri)
|
@@ -265,13 +265,13 @@ class EntryService:
|
||||
def list_entries(
|
||||
self,
|
||||
sort_by: str = "index",
|
||||
filter_kind: str | None = None,
|
||||
filter_kinds: list[str] | None = None,
|
||||
include_archived: bool = False,
|
||||
):
|
||||
with self._lock:
|
||||
return self._manager.entry_manager.list_entries(
|
||||
sort_by=sort_by,
|
||||
filter_kind=filter_kind,
|
||||
filter_kinds=filter_kinds,
|
||||
include_archived=include_archived,
|
||||
)
|
||||
|
||||
@@ -305,9 +305,10 @@ class EntryService:
|
||||
|
||||
def get_totp_code(self, entry_id: int) -> str:
|
||||
with self._lock:
|
||||
return self._manager.entry_manager.get_totp_code(
|
||||
entry_id, self._manager.parent_seed
|
||||
key = getattr(self._manager, "KEY_TOTP_DET", None) or getattr(
|
||||
self._manager, "parent_seed", None
|
||||
)
|
||||
return self._manager.entry_manager.get_totp_code(entry_id, key)
|
||||
|
||||
def add_entry(
|
||||
self,
|
||||
@@ -362,15 +363,18 @@ class EntryService:
|
||||
secret: str | None = None,
|
||||
period: int = 30,
|
||||
digits: int = 6,
|
||||
deterministic: bool = False,
|
||||
) -> str:
|
||||
with self._lock:
|
||||
key = self._manager.KEY_TOTP_DET if deterministic else None
|
||||
uri = self._manager.entry_manager.add_totp(
|
||||
label,
|
||||
self._manager.parent_seed,
|
||||
key,
|
||||
index=index,
|
||||
secret=secret,
|
||||
period=period,
|
||||
digits=digits,
|
||||
deterministic=deterministic,
|
||||
)
|
||||
self._manager.start_background_vault_sync()
|
||||
return uri
|
||||
@@ -515,9 +519,10 @@ class EntryService:
|
||||
|
||||
def export_totp_entries(self) -> dict:
|
||||
with self._lock:
|
||||
return self._manager.entry_manager.export_totp_entries(
|
||||
self._manager.parent_seed
|
||||
key = getattr(self._manager, "KEY_TOTP_DET", None) or getattr(
|
||||
self._manager, "parent_seed", None
|
||||
)
|
||||
return self._manager.entry_manager.export_totp_entries(key)
|
||||
|
||||
def display_totp_codes(self) -> None:
|
||||
with self._lock:
|
||||
|
@@ -41,7 +41,7 @@ class ConfigManager:
|
||||
logger.info("Config file not found; returning defaults")
|
||||
return {
|
||||
"relays": list(DEFAULT_NOSTR_RELAYS),
|
||||
"offline_mode": False,
|
||||
"offline_mode": True,
|
||||
"pin_hash": "",
|
||||
"password_hash": "",
|
||||
"inactivity_timeout": INACTIVITY_TIMEOUT,
|
||||
@@ -71,7 +71,7 @@ class ConfigManager:
|
||||
raise ValueError("Config data must be a dictionary")
|
||||
# Ensure defaults for missing keys
|
||||
data.setdefault("relays", list(DEFAULT_NOSTR_RELAYS))
|
||||
data.setdefault("offline_mode", False)
|
||||
data.setdefault("offline_mode", True)
|
||||
data.setdefault("pin_hash", "")
|
||||
data.setdefault("password_hash", "")
|
||||
data.setdefault("inactivity_timeout", INACTIVITY_TIMEOUT)
|
||||
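This hunk flips the `offline_mode` default to `True`, both for freshly created configs and when back-filling keys missing from an existing file. A tiny sketch of the back-fill pattern (the `DEFAULTS` dict is illustrative, not the full SeedPass config):

```python
# Sketch: back-filling missing config keys without clobbering user-set values.
DEFAULTS = {"offline_mode": True, "pin_hash": "", "inactivity_timeout": 900}

def apply_defaults(data: dict) -> dict:
    for key, value in DEFAULTS.items():
        data.setdefault(key, value)  # only fills keys the user never set
    return data

print(apply_defaults({"offline_mode": False}))  # user's False is preserved
```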
|
@@ -16,8 +16,10 @@ except Exception: # pragma: no cover - fallback for environments without orjson
|
||||
import hashlib
|
||||
import os
|
||||
import base64
|
||||
import zlib
|
||||
from dataclasses import asdict
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.exceptions import InvalidTag
|
||||
@@ -26,6 +28,7 @@ from termcolor import colored
|
||||
from utils.file_lock import exclusive_lock
|
||||
from mnemonic import Mnemonic
|
||||
from utils.password_prompt import prompt_existing_password
|
||||
from utils.key_derivation import KdfConfig, CURRENT_KDF_VERSION
|
||||
|
||||
# Instantiate the logger
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -89,16 +92,23 @@ class EncryptionManager:
|
||||
# Track user preference for handling legacy indexes
|
||||
self._legacy_migrate_flag = True
|
||||
self.last_migration_performed = False
|
||||
# Track nonces to detect accidental reuse
|
||||
self.nonce_crc_table: set[int] = set()
|
||||
|
||||
def encrypt_data(self, data: bytes) -> bytes:
|
||||
"""
|
||||
(2) Encrypts data using the NEW AES-GCM format, prepending a version
|
||||
header and the nonce. All new data will be in this format.
|
||||
Encrypt data using AES-GCM, emitting ``b"V3|" + nonce + ciphertext + tag``.
|
||||
A fresh 96-bit nonce is generated for each call and tracked via a CRC
|
||||
table to detect accidental reuse during batch operations.
|
||||
"""
|
||||
try:
|
||||
nonce = os.urandom(12) # 96-bit nonce is recommended for AES-GCM
|
||||
crc = zlib.crc32(nonce)
|
||||
if crc in self.nonce_crc_table:
|
||||
raise ValueError("Nonce reuse detected")
|
||||
self.nonce_crc_table.add(crc)
|
||||
ciphertext = self.cipher.encrypt(nonce, data, None)
|
||||
return b"V2:" + nonce + ciphertext
|
||||
return b"V3|" + nonce + ciphertext
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to encrypt data: {e}", exc_info=True)
|
||||
raise
|
||||
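A minimal sketch of the `V3|` framing introduced above, assuming a raw 32-byte AES-256-GCM key (SeedPass derives its key elsewhere, so the random key here is purely illustrative):

```python
# Sketch of the V3 on-disk framing: b"V3|" + 12-byte nonce + ciphertext-and-tag.
import os
import zlib
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)   # illustrative; not SeedPass's key derivation
cipher = AESGCM(key)
seen_nonce_crcs: set[int] = set()

def encrypt_v3(data: bytes) -> bytes:
    nonce = os.urandom(12)                  # 96-bit nonce, as recommended for AES-GCM
    crc = zlib.crc32(nonce)
    if crc in seen_nonce_crcs:              # cheap accidental-reuse check
        raise ValueError("Nonce reuse detected")
    seen_nonce_crcs.add(crc)
    return b"V3|" + nonce + cipher.encrypt(nonce, data, None)

blob = encrypt_v3(b"hello vault")
assert blob.startswith(b"V3|")
```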
@@ -120,7 +130,21 @@ class EncryptionManager:
|
||||
ctx = f" {context}" if context else ""
|
||||
|
||||
try:
|
||||
# Try the new V2 format first
|
||||
# Try the new V3 format first
|
||||
if encrypted_data.startswith(b"V3|"):
|
||||
try:
|
||||
nonce = encrypted_data[3:15]
|
||||
ciphertext = encrypted_data[15:]
|
||||
if len(ciphertext) < 16:
|
||||
logger.error("AES-GCM payload too short")
|
||||
raise InvalidToken("AES-GCM payload too short")
|
||||
return self.cipher.decrypt(nonce, ciphertext, None)
|
||||
except InvalidTag as e:
|
||||
msg = f"Failed to decrypt{ctx}: invalid key or corrupt file"
|
||||
logger.error(msg)
|
||||
raise InvalidToken(msg) from e
|
||||
|
||||
# Next try the older V2 format
|
||||
if encrypted_data.startswith(b"V2:"):
|
||||
try:
|
||||
nonce = encrypted_data[3:15]
|
||||
@@ -144,19 +168,18 @@ class EncryptionManager:
|
||||
logger.error(msg)
|
||||
raise InvalidToken(msg) from e
|
||||
|
||||
# If it's not V2, it must be the legacy Fernet format
|
||||
else:
|
||||
logger.warning("Data is in legacy Fernet format. Attempting migration.")
|
||||
try:
|
||||
return self.fernet.decrypt(encrypted_data)
|
||||
except InvalidToken as e:
|
||||
logger.error(
|
||||
"Legacy Fernet decryption failed. Vault may be corrupt or key is incorrect."
|
||||
)
|
||||
raise e
|
||||
# If it's neither V3 nor V2, assume legacy Fernet format
|
||||
logger.warning("Data is in legacy Fernet format. Attempting migration.")
|
||||
try:
|
||||
return self.fernet.decrypt(encrypted_data)
|
||||
except InvalidToken as e:
|
||||
logger.error(
|
||||
"Legacy Fernet decryption failed. Vault may be corrupt or key is incorrect."
|
||||
)
|
||||
raise e
|
||||
|
||||
except (InvalidToken, InvalidTag) as e:
|
||||
if encrypted_data.startswith(b"V2:"):
|
||||
if encrypted_data.startswith(b"V3|") or encrypted_data.startswith(b"V2:"):
|
||||
# Already determined not to be legacy; re-raise
|
||||
raise
|
||||
if isinstance(e, InvalidToken) and str(e) == "AES-GCM payload too short":
|
||||
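Decryption mirrors that framing: the version prefix selects the code path, and anything without a recognised prefix is treated as legacy Fernet. A hedged sketch of the dispatch, with the cipher objects passed in rather than held on the manager:

```python
# Sketch of the decrypt dispatch: V3| and V2: are AES-GCM (12-byte nonce prefix);
# anything else is assumed to be legacy Fernet. `cipher` and `fernet` stand in
# for the objects EncryptionManager already holds.
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
from cryptography.fernet import Fernet, InvalidToken

def decrypt_any(blob: bytes, cipher: AESGCM, fernet: Fernet) -> bytes:
    for prefix in (b"V3|", b"V2:"):
        if blob.startswith(prefix):
            nonce, ciphertext = blob[3:15], blob[15:]
            if len(ciphertext) < 16:        # the GCM tag alone is 16 bytes
                raise InvalidToken("AES-GCM payload too short")
            return cipher.decrypt(nonce, ciphertext, None)
    # no recognised prefix: fall back to the legacy Fernet format
    return fernet.decrypt(blob)
```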
@@ -231,40 +254,78 @@ class EncryptionManager:
|
||||
raise ValueError("Invalid path outside fingerprint directory")
|
||||
return candidate
|
||||
|
||||
def encrypt_parent_seed(self, parent_seed: str) -> None:
|
||||
def encrypt_parent_seed(
|
||||
self, parent_seed: str, kdf: Optional[KdfConfig] = None
|
||||
) -> None:
|
||||
"""Encrypts and saves the parent seed to 'parent_seed.enc'."""
|
||||
data = parent_seed.encode("utf-8")
|
||||
encrypted_data = self.encrypt_data(data) # This now creates V2 format
|
||||
with exclusive_lock(self.parent_seed_file) as fh:
|
||||
fh.seek(0)
|
||||
fh.truncate()
|
||||
fh.write(encrypted_data)
|
||||
os.chmod(self.parent_seed_file, 0o600)
|
||||
self.encrypt_and_save_file(data, self.parent_seed_file, kdf=kdf)
|
||||
logger.info(f"Parent seed encrypted and saved to '{self.parent_seed_file}'.")
|
||||
|
||||
def decrypt_parent_seed(self) -> str:
|
||||
"""Decrypts and returns the parent seed, handling migration."""
|
||||
with exclusive_lock(self.parent_seed_file) as fh:
|
||||
fh.seek(0)
|
||||
encrypted_data = fh.read()
|
||||
blob = fh.read()
|
||||
|
||||
is_legacy = not encrypted_data.startswith(b"V2:")
|
||||
kdf, encrypted_data = self._deserialize(blob)
|
||||
is_legacy = not (
|
||||
encrypted_data.startswith(b"V3|") or encrypted_data.startswith(b"V2:")
|
||||
)
|
||||
decrypted_data = self.decrypt_data(encrypted_data, context="seed")
|
||||
|
||||
if is_legacy:
|
||||
logger.info("Parent seed was in legacy format. Re-encrypting to V2 format.")
|
||||
self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip())
|
||||
logger.info("Parent seed was in legacy format. Re-encrypting to V3 format.")
|
||||
self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip(), kdf=kdf)
|
||||
|
||||
return decrypted_data.decode("utf-8").strip()
|
||||
|
||||
def encrypt_and_save_file(self, data: bytes, relative_path: Path) -> None:
|
||||
def _serialize(self, kdf: KdfConfig, ciphertext: bytes) -> bytes:
|
||||
payload = {"kdf": asdict(kdf), "ct": base64.b64encode(ciphertext).decode()}
|
||||
if USE_ORJSON:
|
||||
return json_lib.dumps(payload)
|
||||
return json_lib.dumps(payload, separators=(",", ":")).encode("utf-8")
|
||||
|
||||
def _deserialize(self, blob: bytes) -> Tuple[KdfConfig, bytes]:
|
||||
"""Return ``(KdfConfig, ciphertext)`` from serialized *blob*.
|
||||
|
||||
Legacy files stored the raw ciphertext without a JSON wrapper. If
|
||||
decoding the wrapper fails, treat ``blob`` as the ciphertext and return
|
||||
a default HKDF configuration.
|
||||
"""
|
||||
|
||||
try:
|
||||
if USE_ORJSON:
|
||||
obj = json_lib.loads(blob)
|
||||
else:
|
||||
obj = json_lib.loads(blob.decode("utf-8"))
|
||||
kdf = KdfConfig(**obj.get("kdf", {}))
|
||||
ct_b64 = obj.get("ct", "")
|
||||
ciphertext = base64.b64decode(ct_b64)
|
||||
if ciphertext:
|
||||
return kdf, ciphertext
|
||||
except Exception: # pragma: no cover - fall back to legacy path
|
||||
pass
|
||||
|
||||
# Legacy format: ``blob`` already contains the ciphertext
|
||||
return (
|
||||
KdfConfig(name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""),
|
||||
blob,
|
||||
)
|
||||
|
||||
def encrypt_and_save_file(
|
||||
self, data: bytes, relative_path: Path, *, kdf: Optional[KdfConfig] = None
|
||||
) -> None:
|
||||
if kdf is None:
|
||||
kdf = KdfConfig()
|
||||
file_path = self.resolve_relative_path(relative_path)
|
||||
file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
encrypted_data = self.encrypt_data(data)
|
||||
payload = self._serialize(kdf, encrypted_data)
|
||||
with exclusive_lock(file_path) as fh:
|
||||
fh.seek(0)
|
||||
fh.truncate()
|
||||
fh.write(encrypted_data)
|
||||
fh.write(payload)
|
||||
fh.flush()
|
||||
os.fsync(fh.fileno())
|
||||
os.chmod(file_path, 0o600)
|
||||
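Encrypted files now carry a small JSON wrapper that records the KDF parameters next to the base64 ciphertext, and readers fall back to treating the whole blob as ciphertext when the wrapper is absent. A sketch of that shape, using a simplified stand-in for `KdfConfig`:

```python
# Sketch of the per-file wrapper: {"kdf": {...}, "ct": "<base64 ciphertext>"}.
# KdfConfig here is a simplified stand-in for utils.key_derivation.KdfConfig.
import base64
import json
from dataclasses import dataclass, field, asdict

@dataclass
class KdfConfig:
    name: str = "hkdf"
    version: int = 1
    params: dict = field(default_factory=dict)
    salt_b64: str = ""

def serialize(kdf: KdfConfig, ciphertext: bytes) -> bytes:
    payload = {"kdf": asdict(kdf), "ct": base64.b64encode(ciphertext).decode()}
    return json.dumps(payload, separators=(",", ":")).encode("utf-8")

def deserialize(blob: bytes) -> tuple[KdfConfig, bytes]:
    try:
        obj = json.loads(blob.decode("utf-8"))
        return KdfConfig(**obj["kdf"]), base64.b64decode(obj["ct"])
    except Exception:
        # legacy file: the blob *is* the ciphertext; assume default HKDF params
        return KdfConfig(), blob
```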
@@ -273,20 +334,37 @@ class EncryptionManager:
|
||||
file_path = self.resolve_relative_path(relative_path)
|
||||
with exclusive_lock(file_path) as fh:
|
||||
fh.seek(0)
|
||||
encrypted_data = fh.read()
|
||||
blob = fh.read()
|
||||
_, encrypted_data = self._deserialize(blob)
|
||||
return self.decrypt_data(encrypted_data, context=str(relative_path))
|
||||
|
||||
def save_json_data(self, data: dict, relative_path: Optional[Path] = None) -> None:
|
||||
def get_file_kdf(self, relative_path: Path) -> KdfConfig:
|
||||
file_path = self.resolve_relative_path(relative_path)
|
||||
with exclusive_lock(file_path) as fh:
|
||||
fh.seek(0)
|
||||
blob = fh.read()
|
||||
kdf, _ = self._deserialize(blob)
|
||||
return kdf
|
||||
|
||||
def save_json_data(
|
||||
self,
|
||||
data: dict,
|
||||
relative_path: Optional[Path] = None,
|
||||
*,
|
||||
kdf: Optional[KdfConfig] = None,
|
||||
) -> None:
|
||||
if relative_path is None:
|
||||
relative_path = Path("seedpass_entries_db.json.enc")
|
||||
if USE_ORJSON:
|
||||
json_data = json_lib.dumps(data)
|
||||
else:
|
||||
json_data = json_lib.dumps(data, separators=(",", ":")).encode("utf-8")
|
||||
self.encrypt_and_save_file(json_data, relative_path)
|
||||
self.encrypt_and_save_file(json_data, relative_path, kdf=kdf)
|
||||
logger.debug(f"JSON data encrypted and saved to '{relative_path}'.")
|
||||
|
||||
def load_json_data(self, relative_path: Optional[Path] = None) -> dict:
|
||||
def load_json_data(
|
||||
self, relative_path: Optional[Path] = None, *, return_kdf: bool = False
|
||||
) -> dict | Tuple[dict, KdfConfig]:
|
||||
"""
|
||||
Loads and decrypts JSON data, automatically migrating and re-saving
|
||||
if it's in the legacy format.
|
||||
@@ -295,13 +373,21 @@ class EncryptionManager:
|
||||
relative_path = Path("seedpass_entries_db.json.enc")
|
||||
file_path = self.resolve_relative_path(relative_path)
|
||||
if not file_path.exists():
|
||||
return {"entries": {}}
|
||||
empty: dict = {"entries": {}}
|
||||
if return_kdf:
|
||||
return empty, KdfConfig(
|
||||
name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""
|
||||
)
|
||||
return empty
|
||||
|
||||
with exclusive_lock(file_path) as fh:
|
||||
fh.seek(0)
|
||||
encrypted_data = fh.read()
|
||||
blob = fh.read()
|
||||
|
||||
is_legacy = not encrypted_data.startswith(b"V2:")
|
||||
kdf, encrypted_data = self._deserialize(blob)
|
||||
is_legacy = not (
|
||||
encrypted_data.startswith(b"V3|") or encrypted_data.startswith(b"V2:")
|
||||
)
|
||||
self.last_migration_performed = False
|
||||
|
||||
try:
|
||||
@@ -316,16 +402,20 @@ class EncryptionManager:
|
||||
# If it was a legacy file, re-save it in the new format now
|
||||
if is_legacy and self._legacy_migrate_flag:
|
||||
logger.info(f"Migrating and re-saving legacy vault file: {file_path}")
|
||||
self.save_json_data(data, relative_path)
|
||||
self.save_json_data(data, relative_path, kdf=kdf)
|
||||
self.update_checksum(relative_path)
|
||||
self.last_migration_performed = True
|
||||
|
||||
if return_kdf:
|
||||
return data, kdf
|
||||
return data
|
||||
except (InvalidToken, InvalidTag, JSONDecodeError) as e:
|
||||
logger.error(
|
||||
f"FATAL: Could not decrypt or parse data from {file_path}: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
except (InvalidToken, InvalidTag) as e:
|
||||
msg = f"Failed to decrypt or parse data from {file_path}: {e}"
|
||||
logger.error(msg)
|
||||
raise InvalidToken(msg) from e
|
||||
except JSONDecodeError as e:
|
||||
msg = f"Failed to parse JSON data from {file_path}: {e}"
|
||||
logger.error(msg)
|
||||
raise
|
||||
|
||||
def get_encrypted_index(self) -> Optional[bytes]:
|
||||
@@ -360,7 +450,8 @@ class EncryptionManager:
|
||||
if relative_path is None:
|
||||
relative_path = Path("seedpass_entries_db.json.enc")
|
||||
|
||||
is_legacy = not encrypted_data.startswith(b"V2:")
|
||||
kdf, ciphertext = self._deserialize(encrypted_data)
|
||||
is_legacy = not (ciphertext.startswith(b"V3|") or ciphertext.startswith(b"V2:"))
|
||||
self.last_migration_performed = False
|
||||
|
||||
def _process(decrypted: bytes) -> dict:
|
||||
@@ -386,11 +477,9 @@ class EncryptionManager:
|
||||
return data
|
||||
|
||||
try:
|
||||
decrypted_data = self.decrypt_data(
|
||||
encrypted_data, context=str(relative_path)
|
||||
)
|
||||
decrypted_data = self.decrypt_data(ciphertext, context=str(relative_path))
|
||||
data = _process(decrypted_data)
|
||||
self.save_json_data(data, relative_path) # This always saves in V2 format
|
||||
self.save_json_data(data, relative_path, kdf=kdf)
|
||||
self.update_checksum(relative_path)
|
||||
logger.info("Index file from Nostr was processed and saved successfully.")
|
||||
self.last_migration_performed = is_legacy
|
||||
@@ -401,10 +490,10 @@ class EncryptionManager:
|
||||
"Enter your master password for legacy decryption: "
|
||||
)
|
||||
decrypted_data = self.decrypt_legacy(
|
||||
encrypted_data, password, context=str(relative_path)
|
||||
ciphertext, password, context=str(relative_path)
|
||||
)
|
||||
data = _process(decrypted_data)
|
||||
self.save_json_data(data, relative_path)
|
||||
self.save_json_data(data, relative_path, kdf=kdf)
|
||||
self.update_checksum(relative_path)
|
||||
logger.warning(
|
||||
"Index decrypted using legacy password-only key derivation."
|
||||
|
@@ -33,8 +33,8 @@ from pathlib import Path
|
||||
|
||||
from termcolor import colored
|
||||
from .migrations import LATEST_VERSION
|
||||
from .entry_types import EntryType
|
||||
from .totp import TotpManager
|
||||
from .entry_types import EntryType, ALL_ENTRY_TYPES
|
||||
from .totp import TotpManager, random_totp_secret
|
||||
from utils.fingerprint import generate_fingerprint
|
||||
from utils.checksum import canonical_json_dumps
|
||||
from utils.atomic_write import atomic_write
|
||||
@@ -257,7 +257,7 @@ class EntryManager:
|
||||
def add_totp(
|
||||
self,
|
||||
label: str,
|
||||
parent_seed: str,
|
||||
parent_seed: str | bytes | None = None,
|
||||
*,
|
||||
archived: bool = False,
|
||||
secret: str | None = None,
|
||||
@@ -266,13 +266,16 @@ class EntryManager:
|
||||
digits: int = 6,
|
||||
notes: str = "",
|
||||
tags: list[str] | None = None,
|
||||
deterministic: bool = False,
|
||||
) -> str:
|
||||
"""Add a new TOTP entry and return the provisioning URI."""
|
||||
entry_id = self.get_next_index()
|
||||
data = self._load_index()
|
||||
data.setdefault("entries", {})
|
||||
|
||||
if secret is None:
|
||||
if deterministic:
|
||||
if parent_seed is None:
|
||||
raise ValueError("Seed required for deterministic TOTP")
|
||||
if index is None:
|
||||
index = self.get_next_totp_index()
|
||||
secret = TotpManager.derive_secret(parent_seed, index)
|
||||
@@ -289,8 +292,11 @@ class EntryManager:
|
||||
"archived": archived,
|
||||
"notes": notes,
|
||||
"tags": tags or [],
|
||||
"deterministic": True,
|
||||
}
|
||||
else:
|
||||
if secret is None:
|
||||
secret = random_totp_secret()
|
||||
if not validate_totp_secret(secret):
|
||||
raise ValueError("Invalid TOTP secret")
|
||||
entry = {
|
||||
@@ -304,6 +310,7 @@ class EntryManager:
|
||||
"archived": archived,
|
||||
"notes": notes,
|
||||
"tags": tags or [],
|
||||
"deterministic": False,
|
||||
}
|
||||
|
||||
data["entries"][str(entry_id)] = entry
|
||||
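In short, `add_totp` now records either a derived entry (index only, `deterministic: True`) or an imported one carrying an explicit Base32 `secret`. A hedged sketch of the provisioning URI such an entry yields, using `pyotp` with an illustrative secret in place of the real derived or imported value:

```python
# Sketch: building the otpauth:// provisioning URI for a stored TOTP entry.
# The secret is illustrative; deterministic entries derive theirs from the seed.
import pyotp

secret = pyotp.random_base32()        # stand-in for an imported or derived secret
totp = pyotp.TOTP(secret, interval=30, digits=6)
uri = totp.provisioning_uri(name="example-label", issuer_name="SeedPass")
print(uri)  # otpauth://totp/SeedPass:example-label?secret=...&issuer=SeedPass
```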
@@ -461,7 +468,7 @@ class EntryManager:
|
||||
|
||||
seed_bytes = Bip39SeedGenerator(parent_seed).Generate()
|
||||
bip85 = BIP85(seed_bytes)
|
||||
entropy = bip85.derive_entropy(index=index, bytes_len=32)
|
||||
entropy = bip85.derive_entropy(index=index, entropy_bytes=32)
|
||||
keys = Keys(priv_k=entropy.hex())
|
||||
npub = Keys.hex_to_bech32(keys.public_key_hex(), "npub")
|
||||
nsec = Keys.hex_to_bech32(keys.private_key_hex(), "nsec")
|
||||
@@ -539,7 +546,7 @@ class EntryManager:
|
||||
bip85 = BIP85(seed_bytes)
|
||||
|
||||
key_idx = int(entry.get("index", index))
|
||||
entropy = bip85.derive_entropy(index=key_idx, bytes_len=32)
|
||||
entropy = bip85.derive_entropy(index=key_idx, entropy_bytes=32)
|
||||
keys = Keys(priv_k=entropy.hex())
|
||||
npub = Keys.hex_to_bech32(keys.public_key_hex(), "npub")
|
||||
nsec = Keys.hex_to_bech32(keys.private_key_hex(), "nsec")
|
||||
@@ -689,7 +696,10 @@ class EntryManager:
|
||||
return derive_seed_phrase(bip85, seed_index, words)
|
||||
|
||||
def get_totp_code(
|
||||
self, index: int, parent_seed: str | None = None, timestamp: int | None = None
|
||||
self,
|
||||
index: int,
|
||||
parent_seed: str | bytes | None = None,
|
||||
timestamp: int | None = None,
|
||||
) -> str:
|
||||
"""Return the current TOTP code for the specified entry."""
|
||||
entry = self.retrieve_entry(index)
|
||||
@@ -699,12 +709,12 @@ class EntryManager:
|
||||
etype != EntryType.TOTP.value and kind != EntryType.TOTP.value
|
||||
):
|
||||
raise ValueError("Entry is not a TOTP entry")
|
||||
if "secret" in entry:
|
||||
return TotpManager.current_code_from_secret(entry["secret"], timestamp)
|
||||
if parent_seed is None:
|
||||
raise ValueError("Seed required for derived TOTP")
|
||||
totp_index = int(entry.get("index", 0))
|
||||
return TotpManager.current_code(parent_seed, totp_index, timestamp)
|
||||
if entry.get("deterministic", False) or "secret" not in entry:
|
||||
if parent_seed is None:
|
||||
raise ValueError("Seed required for derived TOTP")
|
||||
totp_index = int(entry.get("index", 0))
|
||||
return TotpManager.current_code(parent_seed, totp_index, timestamp)
|
||||
return TotpManager.current_code_from_secret(entry["secret"], timestamp)
|
||||
|
||||
def get_totp_time_remaining(self, index: int) -> int:
|
||||
"""Return seconds remaining in the TOTP period for the given entry."""
|
||||
@@ -719,7 +729,9 @@ class EntryManager:
|
||||
period = int(entry.get("period", 30))
|
||||
return TotpManager.time_remaining(period)
|
||||
|
||||
def export_totp_entries(self, parent_seed: str) -> dict[str, list[dict[str, Any]]]:
|
||||
def export_totp_entries(
|
||||
self, parent_seed: str | bytes | None
|
||||
) -> dict[str, list[dict[str, Any]]]:
|
||||
"""Return all TOTP secrets and metadata for external use."""
|
||||
data = self._load_index()
|
||||
entries = data.get("entries", {})
|
||||
@@ -731,11 +743,13 @@ class EntryManager:
|
||||
label = entry.get("label", "")
|
||||
period = int(entry.get("period", 30))
|
||||
digits = int(entry.get("digits", 6))
|
||||
if "secret" in entry:
|
||||
secret = entry["secret"]
|
||||
else:
|
||||
if entry.get("deterministic", False) or "secret" not in entry:
|
||||
if parent_seed is None:
|
||||
raise ValueError("Seed required for deterministic TOTP export")
|
||||
idx = int(entry.get("index", 0))
|
||||
secret = TotpManager.derive_secret(parent_seed, idx)
|
||||
else:
|
||||
secret = entry["secret"]
|
||||
uri = TotpManager.make_otpauth_uri(label, secret, period, digits)
|
||||
exported.append(
|
||||
{
|
||||
@@ -1076,7 +1090,7 @@ class EntryManager:
|
||||
def list_entries(
|
||||
self,
|
||||
sort_by: str = "index",
|
||||
filter_kind: str | None = None,
|
||||
filter_kinds: list[str] | None = None,
|
||||
*,
|
||||
include_archived: bool = False,
|
||||
verbose: bool = True,
|
||||
@@ -1088,8 +1102,9 @@ class EntryManager:
|
||||
sort_by:
|
||||
Field to sort by. Supported values are ``"index"``, ``"label"`` and
|
||||
``"updated"``.
|
||||
filter_kind:
|
||||
Optional entry kind to restrict the results.
|
||||
filter_kinds:
|
||||
Optional list of entry kinds to restrict the results. Defaults to
|
||||
``ALL_ENTRY_TYPES``.
|
||||
|
||||
Archived entries are omitted unless ``include_archived`` is ``True``.
|
||||
"""
|
||||
@@ -1118,12 +1133,14 @@ class EntryManager:
|
||||
|
||||
sorted_items = sorted(entries_data.items(), key=sort_key)
|
||||
|
||||
if filter_kinds is None:
|
||||
filter_kinds = ALL_ENTRY_TYPES
|
||||
|
||||
filtered_items: List[Tuple[int, Dict[str, Any]]] = []
|
||||
for idx_str, entry in sorted_items:
|
||||
if (
|
||||
filter_kind is not None
|
||||
and entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
|
||||
!= filter_kind
|
||||
entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
|
||||
not in filter_kinds
|
||||
):
|
||||
continue
|
||||
if not include_archived and entry.get(
|
||||
@@ -1371,7 +1388,7 @@ class EntryManager:
|
||||
def list_all_entries(
|
||||
self,
|
||||
sort_by: str = "index",
|
||||
filter_kind: str | None = None,
|
||||
filter_kinds: list[str] | None = None,
|
||||
*,
|
||||
include_archived: bool = False,
|
||||
) -> None:
|
||||
@@ -1379,7 +1396,7 @@ class EntryManager:
|
||||
try:
|
||||
entries = self.list_entries(
|
||||
sort_by=sort_by,
|
||||
filter_kind=filter_kind,
|
||||
filter_kinds=filter_kinds,
|
||||
include_archived=include_archived,
|
||||
)
|
||||
if not entries:
|
||||
@@ -1403,7 +1420,7 @@ class EntryManager:
|
||||
|
||||
def get_entry_summaries(
|
||||
self,
|
||||
filter_kind: str | None = None,
|
||||
filter_kinds: list[str] | None = None,
|
||||
*,
|
||||
include_archived: bool = False,
|
||||
) -> list[tuple[int, str, str]]:
|
||||
@@ -1412,10 +1429,13 @@ class EntryManager:
|
||||
data = self._load_index()
|
||||
entries_data = data.get("entries", {})
|
||||
|
||||
if filter_kinds is None:
|
||||
filter_kinds = ALL_ENTRY_TYPES
|
||||
|
||||
summaries: list[tuple[int, str, str]] = []
|
||||
for idx_str, entry in entries_data.items():
|
||||
etype = entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
|
||||
if filter_kind and etype != filter_kind:
|
||||
if etype not in filter_kinds:
|
||||
continue
|
||||
if not include_archived and entry.get(
|
||||
"archived", entry.get("blacklisted", False)
|
||||
|
src/seedpass/core/entry_service.py (new file, 233 lines)
@@ -0,0 +1,233 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from termcolor import colored
|
||||
|
||||
from constants import (
|
||||
DEFAULT_PASSWORD_LENGTH,
|
||||
MAX_PASSWORD_LENGTH,
|
||||
MIN_PASSWORD_LENGTH,
|
||||
)
|
||||
import seedpass.core.manager as manager_module
|
||||
from utils.terminal_utils import clear_header_with_notification, pause
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover - typing only
|
||||
from .manager import PasswordManager
|
||||
|
||||
|
||||
class EntryService:
|
||||
"""Entry management operations for :class:`PasswordManager`."""
|
||||
|
||||
def __init__(self, manager: PasswordManager) -> None:
|
||||
self.manager = manager
|
||||
|
||||
def handle_add_password(self) -> None:
|
||||
pm = self.manager
|
||||
try:
|
||||
fp, parent_fp, child_fp = pm.header_fingerprint_args
|
||||
clear_header_with_notification(
|
||||
pm,
|
||||
fp,
|
||||
"Main Menu > Add Entry > Password",
|
||||
parent_fingerprint=parent_fp,
|
||||
child_fingerprint=child_fp,
|
||||
)
|
||||
|
||||
def prompt_length() -> int | None:
|
||||
length_input = input(
|
||||
f"Enter desired password length (default {DEFAULT_PASSWORD_LENGTH}): "
|
||||
).strip()
|
||||
length = DEFAULT_PASSWORD_LENGTH
|
||||
if length_input:
|
||||
if not length_input.isdigit():
|
||||
print(
|
||||
colored("Error: Password length must be a number.", "red")
|
||||
)
|
||||
return None
|
||||
length = int(length_input)
|
||||
if not (MIN_PASSWORD_LENGTH <= length <= MAX_PASSWORD_LENGTH):
|
||||
print(
|
||||
colored(
|
||||
f"Error: Password length must be between {MIN_PASSWORD_LENGTH} and {MAX_PASSWORD_LENGTH}.",
|
||||
"red",
|
||||
)
|
||||
)
|
||||
return None
|
||||
return length
|
||||
|
||||
def finalize_entry(index: int, label: str, length: int) -> None:
|
||||
pm.is_dirty = True
|
||||
pm.last_update = time.time()
|
||||
|
||||
entry = pm.entry_manager.retrieve_entry(index)
|
||||
password = pm._generate_password_for_entry(entry, index, length)
|
||||
|
||||
print(
|
||||
colored(
|
||||
f"\n[+] Password generated and indexed with ID {index}.\n",
|
||||
"green",
|
||||
)
|
||||
)
|
||||
if pm.secret_mode_enabled:
|
||||
if manager_module.copy_to_clipboard(
|
||||
password, pm.clipboard_clear_delay
|
||||
):
|
||||
print(
|
||||
colored(
|
||||
f"[+] Password copied to clipboard. Will clear in {pm.clipboard_clear_delay} seconds.",
|
||||
"green",
|
||||
)
|
||||
)
|
||||
else:
|
||||
print(colored(f"Password for {label}: {password}\n", "yellow"))
|
||||
|
||||
try:
|
||||
pm.start_background_vault_sync()
|
||||
logging.info(
|
||||
"Encrypted index posted to Nostr after entry addition."
|
||||
)
|
||||
except Exception as nostr_error: # pragma: no cover - best effort
|
||||
logging.error(
|
||||
f"Failed to post updated index to Nostr: {nostr_error}",
|
||||
exc_info=True,
|
||||
)
|
||||
pause()
|
||||
|
||||
mode = input("Choose mode: [Q]uick or [A]dvanced? ").strip().lower()
|
||||
|
||||
website_name = input("Enter the label or website name: ").strip()
|
||||
if not website_name:
|
||||
print(colored("Error: Label cannot be empty.", "red"))
|
||||
return
|
||||
|
||||
username = input("Enter the username (optional): ").strip()
|
||||
url = input("Enter the URL (optional): ").strip()
|
||||
|
||||
if mode.startswith("q"):
|
||||
length = prompt_length()
|
||||
if length is None:
|
||||
return
|
||||
include_special_input = (
|
||||
input("Include special characters? (Y/n): ").strip().lower()
|
||||
)
|
||||
include_special_chars: bool | None = None
|
||||
if include_special_input:
|
||||
include_special_chars = include_special_input != "n"
|
||||
|
||||
index = pm.entry_manager.add_entry(
|
||||
website_name,
|
||||
length,
|
||||
username,
|
||||
url,
|
||||
include_special_chars=include_special_chars,
|
||||
)
|
||||
|
||||
finalize_entry(index, website_name, length)
|
||||
return
|
||||
|
||||
notes = input("Enter notes (optional): ").strip()
|
||||
tags_input = input("Enter tags (comma-separated, optional): ").strip()
|
||||
tags = (
|
||||
[t.strip() for t in tags_input.split(",") if t.strip()]
|
||||
if tags_input
|
||||
else []
|
||||
)
|
||||
|
||||
custom_fields: list[dict[str, object]] = []
|
||||
while True:
|
||||
add_field = input("Add custom field? (y/N): ").strip().lower()
|
||||
if add_field != "y":
|
||||
break
|
||||
label = input(" Field label: ").strip()
|
||||
value = input(" Field value: ").strip()
|
||||
hidden = input(" Hidden field? (y/N): ").strip().lower() == "y"
|
||||
custom_fields.append(
|
||||
{"label": label, "value": value, "is_hidden": hidden}
|
||||
)
|
||||
|
||||
length = prompt_length()
|
||||
if length is None:
|
||||
return
|
||||
|
||||
include_special_input = (
|
||||
input("Include special characters? (Y/n): ").strip().lower()
|
||||
)
|
||||
include_special_chars: bool | None = None
|
||||
if include_special_input:
|
||||
include_special_chars = include_special_input != "n"
|
||||
|
||||
allowed_special_chars = input(
|
||||
"Allowed special characters (leave blank for default): "
|
||||
).strip()
|
||||
if not allowed_special_chars:
|
||||
allowed_special_chars = None
|
||||
|
||||
special_mode = input("Special character mode (safe/leave blank): ").strip()
|
||||
if not special_mode:
|
||||
special_mode = None
|
||||
|
||||
exclude_ambiguous_input = (
|
||||
input("Exclude ambiguous characters? (y/N): ").strip().lower()
|
||||
)
|
||||
exclude_ambiguous: bool | None = None
|
||||
if exclude_ambiguous_input:
|
||||
exclude_ambiguous = exclude_ambiguous_input == "y"
|
||||
|
||||
min_uppercase_input = input(
|
||||
"Minimum uppercase letters (blank for default): "
|
||||
).strip()
|
||||
if min_uppercase_input and not min_uppercase_input.isdigit():
|
||||
print(colored("Error: Minimum uppercase must be a number.", "red"))
|
||||
return
|
||||
min_uppercase = int(min_uppercase_input) if min_uppercase_input else None
|
||||
|
||||
min_lowercase_input = input(
|
||||
"Minimum lowercase letters (blank for default): "
|
||||
).strip()
|
||||
if min_lowercase_input and not min_lowercase_input.isdigit():
|
||||
print(colored("Error: Minimum lowercase must be a number.", "red"))
|
||||
return
|
||||
min_lowercase = int(min_lowercase_input) if min_lowercase_input else None
|
||||
|
||||
min_digits_input = input("Minimum digits (blank for default): ").strip()
|
||||
if min_digits_input and not min_digits_input.isdigit():
|
||||
print(colored("Error: Minimum digits must be a number.", "red"))
|
||||
return
|
||||
min_digits = int(min_digits_input) if min_digits_input else None
|
||||
|
||||
min_special_input = input(
|
||||
"Minimum special characters (blank for default): "
|
||||
).strip()
|
||||
if min_special_input and not min_special_input.isdigit():
|
||||
print(colored("Error: Minimum special must be a number.", "red"))
|
||||
return
|
||||
min_special = int(min_special_input) if min_special_input else None
|
||||
|
||||
index = pm.entry_manager.add_entry(
|
||||
website_name,
|
||||
length,
|
||||
username,
|
||||
url,
|
||||
archived=False,
|
||||
notes=notes,
|
||||
custom_fields=custom_fields,
|
||||
tags=tags,
|
||||
include_special_chars=include_special_chars,
|
||||
allowed_special_chars=allowed_special_chars,
|
||||
special_mode=special_mode,
|
||||
exclude_ambiguous=exclude_ambiguous,
|
||||
min_uppercase=min_uppercase,
|
||||
min_lowercase=min_lowercase,
|
||||
min_digits=min_digits,
|
||||
min_special=min_special,
|
||||
)
|
||||
|
||||
finalize_entry(index, website_name, length)
|
||||
|
||||
except Exception as e: # pragma: no cover - defensive
|
||||
logging.error(f"Error during password generation: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to generate password: {e}", "red"))
|
||||
pause()
|
@@ -15,3 +15,7 @@ class EntryType(str, Enum):
|
||||
NOSTR = "nostr"
|
||||
KEY_VALUE = "key_value"
|
||||
MANAGED_ACCOUNT = "managed_account"
|
||||
|
||||
|
||||
# List of all entry type values for convenience
|
||||
ALL_ENTRY_TYPES = [e.value for e in EntryType]
|
||||
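`ALL_ENTRY_TYPES` gives callers a ready-made whitelist, so `filter_kinds=None` can mean "all kinds". A small sketch of how a list-based filter behaves (the enum is trimmed and the entry dicts are illustrative):

```python
# Sketch of filtering entries by a list of kinds; entries are illustrative.
from enum import Enum

class EntryType(str, Enum):
    PASSWORD = "password"
    TOTP = "totp"
    NOSTR = "nostr"

ALL_ENTRY_TYPES = [e.value for e in EntryType]

entries = [
    {"label": "mail", "type": "password"},
    {"label": "gh-2fa", "type": "totp"},
]

def filter_entries(items, filter_kinds=None):
    kinds = ALL_ENTRY_TYPES if filter_kinds is None else filter_kinds
    return [e for e in items if e.get("type", "password") in kinds]

print(filter_entries(entries, ["totp"]))  # only the TOTP entry remains
```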
|
File diff suppressed because it is too large.
src/seedpass/core/menu_handler.py (new file, 185 lines)
@@ -0,0 +1,185 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from termcolor import colored
|
||||
|
||||
from .entry_types import EntryType, ALL_ENTRY_TYPES
|
||||
import seedpass.core.manager as manager_module
|
||||
from utils.color_scheme import color_text
|
||||
from utils.terminal_utils import clear_header_with_notification
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover - typing only
|
||||
from .manager import PasswordManager
|
||||
|
||||
|
||||
class MenuHandler:
|
||||
"""Handle interactive menu operations for :class:`PasswordManager`."""
|
||||
|
||||
def __init__(self, manager: PasswordManager) -> None:
|
||||
self.manager = manager
|
||||
|
||||
def handle_list_entries(self) -> None:
|
||||
"""List entries and optionally show details."""
|
||||
pm = self.manager
|
||||
try:
|
||||
while True:
|
||||
fp, parent_fp, child_fp = pm.header_fingerprint_args
|
||||
clear_header_with_notification(
|
||||
pm,
|
||||
fp,
|
||||
"Main Menu > List Entries",
|
||||
parent_fingerprint=parent_fp,
|
||||
child_fingerprint=child_fp,
|
||||
)
|
||||
print(color_text("\nList Entries:", "menu"))
|
||||
print(color_text("1. All", "menu"))
|
||||
option_map: dict[str, str] = {}
|
||||
for i, etype in enumerate(ALL_ENTRY_TYPES, start=2):
|
||||
label = etype.replace("_", " ").title()
|
||||
print(color_text(f"{i}. {label}", "menu"))
|
||||
option_map[str(i)] = etype
|
||||
choice = input("Select entry type or press Enter to go back: ").strip()
|
||||
if choice == "1":
|
||||
filter_kinds = None
|
||||
elif choice in option_map:
|
||||
filter_kinds = [option_map[choice]]
|
||||
elif not choice:
|
||||
return
|
||||
else:
|
||||
print(colored("Invalid choice.", "red"))
|
||||
continue
|
||||
|
||||
while True:
|
||||
summaries = pm.entry_manager.get_entry_summaries(
|
||||
filter_kinds, include_archived=False
|
||||
)
|
||||
if not summaries:
|
||||
break
|
||||
fp, parent_fp, child_fp = pm.header_fingerprint_args
|
||||
clear_header_with_notification(
|
||||
pm,
|
||||
fp,
|
||||
"Main Menu > List Entries",
|
||||
parent_fingerprint=parent_fp,
|
||||
child_fingerprint=child_fp,
|
||||
)
|
||||
print(colored("\n[+] Entries:\n", "green"))
|
||||
for idx, etype, label in summaries:
|
||||
if filter_kinds is None:
|
||||
display_type = etype.capitalize()
|
||||
print(colored(f"{idx}. {display_type} - {label}", "cyan"))
|
||||
else:
|
||||
print(colored(f"{idx}. {label}", "cyan"))
|
||||
idx_input = input(
|
||||
"Enter index to view details or press Enter to go back: "
|
||||
).strip()
|
||||
if not idx_input:
|
||||
break
|
||||
if not idx_input.isdigit():
|
||||
print(colored("Invalid index.", "red"))
|
||||
continue
|
||||
pm.show_entry_details_by_index(int(idx_input))
|
||||
except Exception as e: # pragma: no cover - defensive
|
||||
logging.error(f"Failed to list entries: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to list entries: {e}", "red"))
|
||||
|
||||
def handle_display_totp_codes(self) -> None:
|
||||
"""Display all stored TOTP codes with a countdown progress bar."""
|
||||
pm = self.manager
|
||||
try:
|
||||
fp, parent_fp, child_fp = pm.header_fingerprint_args
|
||||
clear_header_with_notification(
|
||||
pm,
|
||||
fp,
|
||||
"Main Menu > 2FA Codes",
|
||||
parent_fingerprint=parent_fp,
|
||||
child_fingerprint=child_fp,
|
||||
)
|
||||
data = pm.entry_manager.vault.load_index()
|
||||
entries = data.get("entries", {})
|
||||
totp_list: list[tuple[str, int, int, bool]] = []
|
||||
for idx_str, entry in entries.items():
|
||||
if pm._entry_type_str(entry) == EntryType.TOTP.value and not entry.get(
|
||||
"archived", entry.get("blacklisted", False)
|
||||
):
|
||||
label = entry.get("label", "")
|
||||
period = int(entry.get("period", 30))
|
||||
imported = "secret" in entry
|
||||
totp_list.append((label, int(idx_str), period, imported))
|
||||
|
||||
if not totp_list:
|
||||
pm.notify("No 2FA entries found.", level="WARNING")
|
||||
return
|
||||
|
||||
totp_list.sort(key=lambda t: t[0].lower())
|
||||
print(colored("Press Enter to return to the menu.", "cyan"))
|
||||
while True:
|
||||
fp, parent_fp, child_fp = pm.header_fingerprint_args
|
||||
clear_header_with_notification(
|
||||
pm,
|
||||
fp,
|
||||
"Main Menu > 2FA Codes",
|
||||
parent_fingerprint=parent_fp,
|
||||
child_fingerprint=child_fp,
|
||||
)
|
||||
print(colored("Press Enter to return to the menu.", "cyan"))
|
||||
generated = [t for t in totp_list if not t[3]]
|
||||
imported_list = [t for t in totp_list if t[3]]
|
||||
if generated:
|
||||
print(colored("\nGenerated 2FA Codes:", "green"))
|
||||
for label, idx, period, _ in generated:
|
||||
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(
|
||||
pm, "parent_seed", None
|
||||
)
|
||||
code = pm.entry_manager.get_totp_code(idx, key)
|
||||
remaining = pm.entry_manager.get_totp_time_remaining(idx)
|
||||
filled = int(20 * (period - remaining) / period)
|
||||
bar = "[" + "#" * filled + "-" * (20 - filled) + "]"
|
||||
if pm.secret_mode_enabled:
|
||||
if manager_module.copy_to_clipboard(
|
||||
code, pm.clipboard_clear_delay
|
||||
):
|
||||
print(
|
||||
f"[{idx}] {label}: [HIDDEN] {bar} {remaining:2d}s - copied to clipboard"
|
||||
)
|
||||
else:
|
||||
print(
|
||||
f"[{idx}] {label}: {color_text(code, 'deterministic')} {bar} {remaining:2d}s"
|
||||
)
|
||||
if imported_list:
|
||||
print(colored("\nImported 2FA Codes:", "green"))
|
||||
for label, idx, period, _ in imported_list:
|
||||
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(
|
||||
pm, "parent_seed", None
|
||||
)
|
||||
code = pm.entry_manager.get_totp_code(idx, key)
|
||||
remaining = pm.entry_manager.get_totp_time_remaining(idx)
|
||||
filled = int(20 * (period - remaining) / period)
|
||||
bar = "[" + "#" * filled + "-" * (20 - filled) + "]"
|
||||
if pm.secret_mode_enabled:
|
||||
if manager_module.copy_to_clipboard(
|
||||
code, pm.clipboard_clear_delay
|
||||
):
|
||||
print(
|
||||
f"[{idx}] {label}: [HIDDEN] {bar} {remaining:2d}s - copied to clipboard"
|
||||
)
|
||||
else:
|
||||
print(
|
||||
f"[{idx}] {label}: {color_text(code, 'imported')} {bar} {remaining:2d}s"
|
||||
)
|
||||
sys.stdout.flush()
|
||||
try:
|
||||
user_input = manager_module.timed_input("", 1)
|
||||
if user_input.strip() == "" or user_input.strip().lower() == "b":
|
||||
break
|
||||
except TimeoutError:
|
||||
pass
|
||||
except KeyboardInterrupt:
|
||||
print()
|
||||
break
|
||||
except Exception as e: # pragma: no cover - defensive
|
||||
logging.error(f"Error displaying TOTP codes: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to display TOTP codes: {e}", "red"))
|
@@ -113,10 +113,12 @@ class PasswordGenerator:
|
||||
self.bip85 = bip85
|
||||
self.policy = policy or PasswordPolicy()
|
||||
|
||||
# Derive seed bytes from parent_seed using BIP39 (handled by EncryptionManager)
|
||||
self.seed_bytes = self.encryption_manager.derive_seed_from_mnemonic(
|
||||
self.parent_seed
|
||||
)
|
||||
if isinstance(parent_seed, (bytes, bytearray)):
|
||||
self.seed_bytes = bytes(parent_seed)
|
||||
else:
|
||||
self.seed_bytes = self.encryption_manager.derive_seed_from_mnemonic(
|
||||
self.parent_seed
|
||||
)
|
||||
|
||||
logger.debug("PasswordGenerator initialized successfully.")
|
||||
except Exception as e:
|
||||
@@ -126,7 +128,7 @@ class PasswordGenerator:
|
||||
|
||||
def _derive_password_entropy(self, index: int) -> bytes:
|
||||
"""Derive deterministic entropy for password generation."""
|
||||
entropy = self.bip85.derive_entropy(index=index, bytes_len=64, app_no=32)
|
||||
entropy = self.bip85.derive_entropy(index=index, entropy_bytes=64, app_no=32)
|
||||
logger.debug("Entropy derived for password generation.")
|
||||
|
||||
hkdf = HKDF(
|
||||
@@ -433,7 +435,7 @@ class PasswordGenerator:
|
||||
|
||||
def derive_ssh_key(bip85: BIP85, idx: int) -> bytes:
|
||||
"""Derive 32 bytes of entropy suitable for an SSH key."""
|
||||
return bip85.derive_entropy(index=idx, bytes_len=32, app_no=32)
|
||||
return bip85.derive_entropy(index=idx, entropy_bytes=32, app_no=32)
|
||||
|
||||
|
||||
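`derive_ssh_key` only returns 32 bytes of BIP85 entropy; turning that into a usable key pair happens elsewhere. A hedged sketch of one way 32 deterministic bytes could seed an Ed25519 key (this mirrors the idea, not necessarily SeedPass's exact key-pair code):

```python
# Sketch: treating 32 deterministic entropy bytes as an Ed25519 private key.
# The entropy here is random for illustration; SeedPass would use BIP85 output.
import os
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

entropy = os.urandom(32)              # stand-in for bip85.derive_entropy(..., entropy_bytes=32)
private_key = Ed25519PrivateKey.from_private_bytes(entropy)
public_ssh = private_key.public_key().public_bytes(
    encoding=serialization.Encoding.OpenSSH,
    format=serialization.PublicFormat.OpenSSH,
)
print(public_ssh.decode())            # "ssh-ed25519 AAAA..."
```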
def derive_ssh_key_pair(parent_seed: str, index: int) -> tuple[str, str]:
|
||||
@@ -499,7 +501,7 @@ def derive_pgp_key(
|
||||
import hashlib
|
||||
import datetime
|
||||
|
||||
entropy = bip85.derive_entropy(index=idx, bytes_len=32, app_no=32)
|
||||
entropy = bip85.derive_entropy(index=idx, entropy_bytes=32, app_no=32)
|
||||
created = datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc)
|
||||
|
||||
if key_type.lower() == "rsa":
|
||||
|
@@ -21,6 +21,7 @@ from utils.key_derivation import (
|
||||
)
|
||||
from .encryption import EncryptionManager
|
||||
from utils.checksum import json_checksum, canonical_json_dumps
|
||||
from .state_manager import StateManager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -32,6 +33,7 @@ class PortableMode(Enum):
|
||||
"""Encryption mode for portable exports."""
|
||||
|
||||
SEED_ONLY = EncryptionMode.SEED_ONLY.value
|
||||
NONE = "none"
|
||||
|
||||
|
||||
def _derive_export_key(seed: str) -> bytes:
|
||||
@@ -47,8 +49,15 @@ def export_backup(
|
||||
*,
|
||||
publish: bool = False,
|
||||
parent_seed: str | None = None,
|
||||
encrypt: bool = True,
|
||||
) -> Path:
|
||||
"""Export the current vault state to a portable encrypted file."""
|
||||
"""Export the current vault state to a portable file.
|
||||
|
||||
When ``encrypt`` is ``True`` (the default) the payload is encrypted with a
|
||||
key derived from the parent seed. When ``encrypt`` is ``False`` the payload
|
||||
is written in plaintext and the wrapper records an ``encryption_mode`` of
|
||||
:data:`PortableMode.NONE`.
|
||||
"""
|
||||
|
||||
if dest_path is None:
|
||||
ts = int(time.time())
|
||||
@@ -57,24 +66,32 @@ def export_backup(
|
||||
dest_path = dest_dir / EXPORT_NAME_TEMPLATE.format(ts=ts)
|
||||
|
||||
index_data = vault.load_index()
|
||||
seed = (
|
||||
parent_seed
|
||||
if parent_seed is not None
|
||||
else vault.encryption_manager.decrypt_parent_seed()
|
||||
)
|
||||
key = _derive_export_key(seed)
|
||||
enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
|
||||
|
||||
canonical = canonical_json_dumps(index_data)
|
||||
payload_bytes = enc_mgr.encrypt_data(canonical.encode("utf-8"))
|
||||
|
||||
if encrypt:
|
||||
seed = (
|
||||
parent_seed
|
||||
if parent_seed is not None
|
||||
else vault.encryption_manager.decrypt_parent_seed()
|
||||
)
|
||||
key = _derive_export_key(seed)
|
||||
enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
|
||||
payload_bytes = enc_mgr.encrypt_data(canonical.encode("utf-8"))
|
||||
mode = PortableMode.SEED_ONLY
|
||||
cipher = "aes-gcm"
|
||||
else:
|
||||
payload_bytes = canonical.encode("utf-8")
|
||||
mode = PortableMode.NONE
|
||||
cipher = "none"
|
||||
|
||||
checksum = json_checksum(index_data)
|
||||
|
||||
wrapper = {
|
||||
"format_version": FORMAT_VERSION,
|
||||
"created_at": int(time.time()),
|
||||
"fingerprint": vault.fingerprint_dir.name,
|
||||
"encryption_mode": PortableMode.SEED_ONLY.value,
|
||||
"cipher": "aes-gcm",
|
||||
"encryption_mode": mode.value,
|
||||
"cipher": cipher,
|
||||
"checksum": checksum,
|
||||
"payload": base64.b64encode(payload_bytes).decode("utf-8"),
|
||||
}
|
||||
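The export wrapper is plain JSON around a payload that may or may not be encrypted, so an importer can dispatch on `encryption_mode`. A sketch of the wrapper shape and the dispatch, with illustrative field values and a plaintext payload:

```python
# Sketch of the portable export wrapper and the importer's mode dispatch.
# Field values are illustrative; the real payload is the (optionally encrypted) index.
import base64
import json
import time

wrapper = {
    "format_version": 1,                  # illustrative version number
    "created_at": int(time.time()),
    "fingerprint": "abc123",
    "encryption_mode": "none",            # the seed-based mode is used for encrypted exports
    "cipher": "none",                     # "aes-gcm" when encrypted
    "checksum": "<json checksum>",
    "payload": base64.b64encode(b'{"entries": {}}').decode(),
}

payload = base64.b64decode(wrapper["payload"])
if wrapper["encryption_mode"] == "none":
    index = json.loads(payload.decode("utf-8"))
else:
    raise NotImplementedError("decrypt with the seed-derived key first")
print(index)
```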
@@ -90,10 +107,12 @@ def export_backup(
|
||||
enc_file.write_bytes(encrypted)
|
||||
os.chmod(enc_file, 0o600)
|
||||
try:
|
||||
idx = StateManager(vault.fingerprint_dir).state.get("nostr_account_idx", 0)
|
||||
client = NostrClient(
|
||||
vault.encryption_manager,
|
||||
vault.fingerprint_dir.name,
|
||||
config_manager=backup_manager.config_manager,
|
||||
account_index=idx,
|
||||
)
|
||||
asyncio.run(client.publish_snapshot(encrypted))
|
||||
except Exception:
|
||||
@@ -118,19 +137,24 @@ def import_backup(
|
||||
if wrapper.get("format_version") != FORMAT_VERSION:
|
||||
raise ValueError("Unsupported backup format")
|
||||
|
||||
if wrapper.get("encryption_mode") != PortableMode.SEED_ONLY.value:
|
||||
raise ValueError("Unsupported encryption mode")
|
||||
mode = wrapper.get("encryption_mode")
|
||||
payload = base64.b64decode(wrapper["payload"])
|
||||
|
||||
seed = (
|
||||
parent_seed
|
||||
if parent_seed is not None
|
||||
else vault.encryption_manager.decrypt_parent_seed()
|
||||
)
|
||||
key = _derive_export_key(seed)
|
||||
enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
|
||||
enc_mgr._legacy_migrate_flag = False
|
||||
index_bytes = enc_mgr.decrypt_data(payload, context="backup payload")
|
||||
if mode == PortableMode.SEED_ONLY.value:
|
||||
seed = (
|
||||
parent_seed
|
||||
if parent_seed is not None
|
||||
else vault.encryption_manager.decrypt_parent_seed()
|
||||
)
|
||||
key = _derive_export_key(seed)
|
||||
enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
|
||||
enc_mgr._legacy_migrate_flag = False
|
||||
index_bytes = enc_mgr.decrypt_data(payload, context="backup payload")
|
||||
elif mode == PortableMode.NONE.value:
|
||||
index_bytes = payload
|
||||
else:
|
||||
raise ValueError("Unsupported encryption mode")
|
||||
|
||||
index = json.loads(index_bytes.decode("utf-8"))
|
||||
|
||||
checksum = json_checksum(index)
|
||||
|
src/seedpass/core/profile_service.py (new file, 109 lines)
@@ -0,0 +1,109 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
|
||||
from termcolor import colored
|
||||
|
||||
import seedpass.core.manager as manager_module
|
||||
|
||||
from utils.password_prompt import prompt_existing_password
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover - typing only
|
||||
from .manager import PasswordManager
|
||||
from nostr.client import NostrClient
|
||||
|
||||
|
||||
class ProfileService:
|
||||
"""Profile-related operations for :class:`PasswordManager`."""
|
||||
|
||||
def __init__(self, manager: PasswordManager) -> None:
|
||||
self.manager = manager
|
||||
|
||||
def handle_switch_fingerprint(self, *, password: Optional[str] = None) -> bool:
|
||||
"""Handle switching to a different seed profile."""
|
||||
pm = self.manager
|
||||
try:
|
||||
print(colored("\nAvailable Seed Profiles:", "cyan"))
|
||||
fingerprints = pm.fingerprint_manager.list_fingerprints()
|
||||
for idx, fp in enumerate(fingerprints, start=1):
|
||||
display = (
|
||||
pm.fingerprint_manager.display_name(fp)
|
||||
if hasattr(pm.fingerprint_manager, "display_name")
|
||||
else fp
|
||||
)
|
||||
print(colored(f"{idx}. {display}", "cyan"))
|
||||
|
||||
choice = input("Select a seed profile by number to switch: ").strip()
|
||||
if not choice.isdigit() or not (1 <= int(choice) <= len(fingerprints)):
|
||||
print(colored("Invalid selection. Returning to main menu.", "red"))
|
||||
return False
|
||||
|
||||
selected_fingerprint = fingerprints[int(choice) - 1]
|
||||
pm.fingerprint_manager.current_fingerprint = selected_fingerprint
|
||||
pm.current_fingerprint = selected_fingerprint
|
||||
if not getattr(pm, "manifest_id", None):
|
||||
pm.manifest_id = None
|
||||
|
||||
pm.fingerprint_dir = pm.fingerprint_manager.get_current_fingerprint_dir()
|
||||
if not pm.fingerprint_dir:
|
||||
print(
|
||||
colored(
|
||||
f"Error: Seed profile directory for {selected_fingerprint} not found.",
|
||||
"red",
|
||||
)
|
||||
)
|
||||
return False
|
||||
|
||||
if password is None:
|
||||
password = prompt_existing_password(
|
||||
"Enter the master password for the selected seed profile: "
|
||||
)
|
||||
|
||||
if not pm.setup_encryption_manager(
|
||||
pm.fingerprint_dir, password, exit_on_fail=False
|
||||
):
|
||||
return False
|
||||
|
||||
pm.initialize_bip85()
|
||||
pm.initialize_managers()
|
||||
pm.start_background_sync()
|
||||
print(colored(f"Switched to seed profile {selected_fingerprint}.", "green"))
|
||||
|
||||
try:
|
||||
pm.nostr_client = manager_module.NostrClient(
|
||||
encryption_manager=pm.encryption_manager,
|
||||
fingerprint=pm.current_fingerprint,
|
||||
config_manager=getattr(pm, "config_manager", None),
|
||||
parent_seed=getattr(pm, "parent_seed", None),
|
||||
key_index=pm.KEY_INDEX,
|
||||
account_index=pm.nostr_account_idx,
|
||||
)
|
||||
if getattr(pm, "manifest_id", None) and hasattr(
|
||||
pm.nostr_client, "_state_lock"
|
||||
):
|
||||
from nostr.backup_models import Manifest
|
||||
|
||||
with pm.nostr_client._state_lock:
|
||||
pm.nostr_client.current_manifest_id = pm.manifest_id
|
||||
pm.nostr_client.current_manifest = Manifest(
|
||||
ver=1,
|
||||
algo="gzip",
|
||||
chunks=[],
|
||||
delta_since=pm.delta_since or None,
|
||||
)
|
||||
logging.info(
|
||||
f"NostrClient re-initialized with seed profile {pm.current_fingerprint}."
|
||||
)
|
||||
except Exception as e:
|
||||
logging.error(f"Failed to re-initialize NostrClient: {e}")
|
||||
print(
|
||||
colored(f"Error: Failed to re-initialize NostrClient: {e}", "red")
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
except Exception as e: # pragma: no cover - defensive
|
||||
logging.error(f"Error during seed profile switching: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to switch seed profiles: {e}", "red"))
|
||||
return False
|
@@ -26,6 +26,7 @@ class StateManager:
|
||||
"manifest_id": None,
|
||||
"delta_since": 0,
|
||||
"relays": list(DEFAULT_RELAYS),
|
||||
"nostr_account_idx": 0,
|
||||
}
|
||||
with shared_lock(self.state_path) as fh:
|
||||
fh.seek(0)
|
||||
@@ -37,6 +38,7 @@ class StateManager:
|
||||
"manifest_id": None,
|
||||
"delta_since": 0,
|
||||
"relays": list(DEFAULT_RELAYS),
|
||||
"nostr_account_idx": 0,
|
||||
}
|
||||
try:
|
||||
obj = json.loads(data.decode())
|
||||
@@ -47,6 +49,7 @@ class StateManager:
|
||||
obj.setdefault("manifest_id", None)
|
||||
obj.setdefault("delta_since", 0)
|
||||
obj.setdefault("relays", list(DEFAULT_RELAYS))
|
||||
obj.setdefault("nostr_account_idx", 0)
|
||||
return obj
|
||||
|
||||
def _save(self, data: dict) -> None:
|
||||
|
@@ -2,8 +2,11 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import base64
|
||||
from typing import Union
|
||||
from urllib.parse import quote
|
||||
from urllib.parse import urlparse, parse_qs, unquote
|
||||
|
||||
@@ -14,17 +17,24 @@ import pyotp
|
||||
from utils import key_derivation
|
||||
|
||||
|
||||
def random_totp_secret(length: int = 20) -> str:
|
||||
"""Return a random Base32 encoded TOTP secret."""
|
||||
return base64.b32encode(os.urandom(length)).decode("ascii").rstrip("=")
|
||||
|
||||
|
||||
class TotpManager:
|
||||
"""Helper methods for TOTP secrets and codes."""
|
||||
|
||||
@staticmethod
|
||||
def derive_secret(seed: str, index: int) -> str:
|
||||
"""Derive a TOTP secret from a BIP39 seed and index."""
|
||||
def derive_secret(seed: Union[str, bytes], index: int) -> str:
|
||||
"""Derive a TOTP secret from a seed or raw key and index."""
|
||||
return key_derivation.derive_totp_secret(seed, index)
|
||||
|
||||
@classmethod
|
||||
def current_code(cls, seed: str, index: int, timestamp: int | None = None) -> str:
|
||||
"""Return the TOTP code for the given seed and index."""
|
||||
def current_code(
|
||||
cls, seed: Union[str, bytes], index: int, timestamp: int | None = None
|
||||
) -> str:
|
||||
"""Return the TOTP code for the given seed/key and index."""
|
||||
secret = cls.derive_secret(seed, index)
|
||||
totp = pyotp.TOTP(secret)
|
||||
if timestamp is None:
|
||||
|
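`random_totp_secret` yields an un-padded Base32 string that standard TOTP libraries accept directly. A quick usage sketch with `pyotp`:

```python
# Sketch: a random un-padded Base32 secret works directly with pyotp.
import base64
import os
import pyotp

secret = base64.b32encode(os.urandom(20)).decode("ascii").rstrip("=")
totp = pyotp.TOTP(secret)
code = totp.now()
assert totp.verify(code)    # the freshly generated code verifies
print(secret, code)
```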
@@ -14,6 +14,7 @@ from .encryption import (
USE_ORJSON,
json_lib,
)
from utils.key_derivation import KdfConfig, CURRENT_KDF_VERSION
from utils.password_prompt import prompt_existing_password


@@ -38,6 +39,11 @@ class Vault:
"""Replace the internal encryption manager."""
self.encryption_manager = manager

def _hkdf_kdf(self) -> KdfConfig:
return KdfConfig(
name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""
)

# ----- Password index helpers -----
def load_index(self, *, return_migration_flags: bool = False):
"""Return decrypted password index data, applying migrations.
@@ -102,10 +108,24 @@ class Vault:
)

try:
data = self.encryption_manager.load_json_data(self.index_file)
data, kdf = self.encryption_manager.load_json_data(
self.index_file, return_kdf=True
)
migration_performed = getattr(
self.encryption_manager, "last_migration_performed", False
)
if kdf.version < CURRENT_KDF_VERSION:
new_kdf = KdfConfig(
name=kdf.name,
version=CURRENT_KDF_VERSION,
params=kdf.params,
salt_b64=kdf.salt_b64,
)
self.encryption_manager.save_json_data(
data, self.index_file, kdf=new_kdf
)
self.encryption_manager.update_checksum(self.index_file)
migration_performed = True
except LegacyFormatRequiresMigrationError:
print(
colored(
@@ -142,7 +162,9 @@ class Vault:
else:
data = json_lib.loads(decrypted.decode("utf-8"))
if self.encryption_manager._legacy_migrate_flag:
self.encryption_manager.save_json_data(data, self.index_file)
self.encryption_manager.save_json_data(
data, self.index_file, kdf=self._hkdf_kdf()
)
self.encryption_manager.update_checksum(self.index_file)
migration_performed = getattr(
self.encryption_manager, "last_migration_performed", False
@@ -181,7 +203,9 @@ class Vault:
try:
data = apply_migrations(data)
if schema_migrated:
self.encryption_manager.save_json_data(data, self.index_file)
self.encryption_manager.save_json_data(
data, self.index_file, kdf=self._hkdf_kdf()
)
self.encryption_manager.update_checksum(self.index_file)
except Exception as exc: # noqa: BLE001 - surface clear error and restore
if legacy_detected and backup_dir is not None:
@@ -214,7 +238,9 @@ class Vault:

def save_index(self, data: dict) -> None:
"""Encrypt and write password index."""
self.encryption_manager.save_json_data(data, self.index_file)
self.encryption_manager.save_json_data(
data, self.index_file, kdf=self._hkdf_kdf()
)

def get_encrypted_index(self) -> Optional[bytes]:
"""Return the encrypted index bytes if present."""
@@ -252,4 +278,6 @@ class Vault:

def save_config(self, config: dict) -> None:
"""Encrypt and persist configuration."""
self.encryption_manager.save_json_data(config, self.config_file)
self.encryption_manager.save_json_data(
config, self.config_file, kdf=self._hkdf_kdf()
)
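For context, an illustrative sketch of how per-file KDF metadata of the kind shown above (name, version, params, salt_b64) could be stored next to the ciphertext. The `"ct"` field and the KdfConfig field names follow the diff and its tests; the wrapper structure and default values below are assumptions, not the actual SeedPass on-disk format.

```python
# Hypothetical wrapper: ciphertext plus the KDF settings needed to re-derive the key.
import base64
import json
from dataclasses import dataclass, field, asdict


@dataclass
class KdfConfig:
    name: str = "argon2id"          # assumed default for illustration
    version: int = 1                # assumed current version
    params: dict = field(default_factory=dict)
    salt_b64: str = ""


def wrap_payload(ciphertext: bytes, kdf: KdfConfig) -> str:
    """Serialize ciphertext together with its KDF metadata."""
    return json.dumps({"kdf": asdict(kdf), "ct": base64.b64encode(ciphertext).decode()})


def read_kdf(wrapped: str) -> KdfConfig:
    """Recover the KDF settings so a newer release can detect and migrate old files."""
    return KdfConfig(**json.loads(wrapped)["kdf"])
```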
src/seedpass/errors.py (new file, 4 lines)
@@ -0,0 +1,4 @@
class VaultLockedError(Exception):
"""Raised when an operation requires an unlocked vault."""

pass
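A short, purely hypothetical usage sketch for the new exception; the guard below is an illustration, not the actual PasswordManager code.

```python
# Illustrative guard: operations that need an unlocked vault raise VaultLockedError.
class VaultLockedError(Exception):
    """Raised when an operation requires an unlocked vault."""


class Session:
    def __init__(self) -> None:
        self.locked = True

    def get_entry(self, index: int) -> dict:
        if self.locked:
            raise VaultLockedError("unlock the vault before reading entries")
        return {"index": index}
```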
@@ -393,7 +393,7 @@ class TotpViewerWindow(toga.Window):
def refresh_codes(self) -> None:
self.table.data = []
for idx, label, *_rest in self.entries.list_entries(
filter_kind=EntryType.TOTP.value
filter_kinds=[EntryType.TOTP.value]
):
entry = self.entries.retrieve_entry(idx)
code = self.entries.get_totp_code(idx)
@@ -4,7 +4,7 @@ import sys

import pytest
from httpx import ASGITransport, AsyncClient
import hashlib
import bcrypt

sys.path.append(str(Path(__file__).resolve().parents[1]))

@@ -54,7 +54,7 @@ async def client(monkeypatch):
async def test_token_hashed(client):
_, token = client
assert api.app.state.token_hash != token
assert api.app.state.token_hash == hashlib.sha256(token.encode()).hexdigest()
assert bcrypt.checkpw(token.encode(), api.app.state.token_hash)


@pytest.mark.anyio
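The hunk above swaps the API token check from an unsalted SHA-256 hexdigest to bcrypt. A minimal sketch of that pattern, using only the standard bcrypt API:

```python
# Hash an API token with bcrypt (salted, slow) and verify it later.
import secrets

import bcrypt

token = secrets.token_urlsafe(32)
token_hash = bcrypt.hashpw(token.encode(), bcrypt.gensalt())

assert bcrypt.checkpw(token.encode(), token_hash)       # the real token verifies
assert not bcrypt.checkpw(b"wrong-token", token_hash)   # anything else fails
```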
@@ -53,6 +53,7 @@ async def test_create_and_modify_totp_entry(client):
"digits": 8,
"notes": "n",
"archived": False,
"deterministic": False,
}

res = await cl.put(
@@ -377,7 +378,7 @@ async def test_vault_export_endpoint(client, tmp_path):
out = tmp_path / "out.json"
out.write_text("data")

api.app.state.pm.handle_export_database = lambda: out
api.app.state.pm.handle_export_database = lambda *a, **k: out

headers = {
"Authorization": f"Bearer {token}",
@@ -501,8 +502,10 @@ async def test_generate_password_no_special_chars(client):
return b"\x00" * 32

class DummyBIP85:
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
return bytes(range(bytes_len))
def derive_entropy(
self, index: int, entropy_bytes: int, app_no: int = 32
) -> bytes:
return bytes(range(entropy_bytes))

api.app.state.pm.password_generator = PasswordGenerator(
DummyEnc(), "seed", DummyBIP85()
@@ -529,8 +532,10 @@ async def test_generate_password_allowed_chars(client):
return b"\x00" * 32

class DummyBIP85:
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
return bytes((index + i) % 256 for i in range(bytes_len))
def derive_entropy(
self, index: int, entropy_bytes: int, app_no: int = 32
) -> bytes:
return bytes((index + i) % 256 for i in range(entropy_bytes))

api.app.state.pm.password_generator = PasswordGenerator(
DummyEnc(), "seed", DummyBIP85()
@@ -36,6 +36,7 @@ def test_audit_logger_records_events(monkeypatch, tmp_path):
monkeypatch.setattr(manager_module, "export_backup", lambda *a, **k: dest)
pm.vault = object()
pm.backup_manager = object()
monkeypatch.setattr("seedpass.core.manager.confirm_action", lambda *_a, **_k: True)
pm.handle_export_database(dest)

confirms = iter([True, False])

@@ -20,6 +20,7 @@ def test_switch_fingerprint_triggers_bg_sync(monkeypatch, tmp_path):
pm.current_fingerprint = None
pm.encryption_manager = object()
pm.config_manager = SimpleNamespace(get_quick_unlock=lambda: False)
pm.nostr_account_idx = 0

monkeypatch.setattr("builtins.input", lambda *_a, **_k: "1")
monkeypatch.setattr(
src/tests/test_backup_restore_startup.py (new file, 56 lines)
@@ -0,0 +1,56 @@
import main
from pathlib import Path


def test_cli_flag_restores_before_init(monkeypatch, tmp_path):
calls = []
backup = tmp_path / "bak.json"
backup.write_text("{}")

def fake_restore(path, fingerprint):
calls.append(("restore", Path(path), fingerprint))

class DummyPM:
def __init__(self, fingerprint=None):
calls.append(("init", fingerprint))
self.secret_mode_enabled = True
self.inactivity_timeout = 0

monkeypatch.setattr(main, "restore_backup_index", fake_restore)
monkeypatch.setattr(main, "PasswordManager", DummyPM)
monkeypatch.setattr(main, "display_menu", lambda pm, **k: None)

rc = main.main(["--fingerprint", "fp", "--restore-backup", str(backup)])
assert rc == 0
assert calls[0][0] == "restore"
assert calls[1][0] == "init"
assert calls[0][1] == backup
assert calls[0][2] == "fp"


def test_menu_option_restores_before_init(monkeypatch, tmp_path):
calls = []
backup = tmp_path / "bak.json"
backup.write_text("{}")

def fake_restore(path, fingerprint):
calls.append(("restore", Path(path), fingerprint))

class DummyPM:
def __init__(self, fingerprint=None):
calls.append(("init", fingerprint))
self.secret_mode_enabled = True
self.inactivity_timeout = 0

monkeypatch.setattr(main, "restore_backup_index", fake_restore)
monkeypatch.setattr(main, "PasswordManager", DummyPM)
monkeypatch.setattr(main, "display_menu", lambda pm, **k: None)
inputs = iter(["2", str(backup)])
monkeypatch.setattr("builtins.input", lambda _prompt="": next(inputs))

rc = main.main(["--fingerprint", "fp"])
assert rc == 0
assert calls[0][0] == "restore"
assert calls[1][0] == "init"
assert calls[0][1] == backup
assert calls[0][2] == "fp"
src/tests/test_bip85_derivation_path.py (new file, 52 lines)
@@ -0,0 +1,52 @@
from local_bip85.bip85 import BIP85


class DummyChild:
def PrivateKey(self):
return self

def Raw(self):
return self

def ToBytes(self):
return b"\x00" * 32


class DummyCtx:
def __init__(self):
self.last_path = None

def DerivePath(self, path: str):
self.last_path = path
return DummyChild()


def test_derivation_paths_for_entropy_lengths():
bip85 = BIP85(b"\x00" * 64)
ctx = DummyCtx()
bip85.bip32_ctx = ctx

vectors = [
(16, 12),
(24, 18),
(32, 24),
]

for entropy_bytes, word_count in vectors:
bip85.derive_entropy(
index=0,
entropy_bytes=entropy_bytes,
app_no=39,
word_count=word_count,
)
assert ctx.last_path == f"m/83696968'/39'/0'/{word_count}'/0'"


def test_default_word_count_from_entropy_bytes():
bip85 = BIP85(b"\x00" * 64)
ctx = DummyCtx()
bip85.bip32_ctx = ctx

bip85.derive_entropy(index=5, entropy_bytes=20, app_no=39)

assert ctx.last_path == "m/83696968'/39'/0'/20'/5'"
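For context, a small sketch of the BIP85 path layout these tests check: `m/83696968'/{app_no}'/0'/{word_count}'/{index}'` for the BIP39 application (app number 39), with 16/24/32 bytes of entropy mapping to 12/18/24 words. The helper below is illustrative only; when no word count is supplied it falls back to the raw byte length, which is what the second test above expects.

```python
# Illustrative construction of the BIP85 BIP39 derivation path string.
def bip85_bip39_path(index: int, entropy_bytes: int, app_no: int = 39,
                     word_count: int | None = None) -> str:
    # Default matches the test: without an explicit word count, use the byte length.
    count = word_count if word_count is not None else entropy_bytes
    return f"m/83696968'/{app_no}'/0'/{count}'/{index}'"


assert bip85_bip39_path(0, 16, word_count=12) == "m/83696968'/39'/0'/12'/0'"
assert bip85_bip39_path(0, 32, word_count=24) == "m/83696968'/39'/0'/24'/0'"
assert bip85_bip39_path(5, 20) == "m/83696968'/39'/0'/20'/5'"
```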
src/tests/test_bip85_init.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import sys
from pathlib import Path

sys.path.append(str(Path(__file__).resolve().parents[1]))

from bip_utils import Bip39SeedGenerator
from local_bip85.bip85 import BIP85
from helpers import TEST_SEED

MASTER_XPRV = "xprv9s21ZrQH143K2LBWUUQRFXhucrQqBpKdRRxNVq2zBqsx8HVqFk2uYo8kmbaLLHRdqtQpUm98uKfu3vca1LqdGhUtyoFnCNkfmXRyPXLjbKb"


def test_init_with_seed_bytes():
seed_bytes = Bip39SeedGenerator(TEST_SEED).Generate()
bip85 = BIP85(seed_bytes)
assert isinstance(bip85, BIP85)


def test_init_with_xprv():
bip85 = BIP85(MASTER_XPRV)
assert isinstance(bip85, BIP85)
@@ -16,7 +16,7 @@ from seedpass.core.entry_types import EntryType
class DummyPM:
def __init__(self):
self.entry_manager = SimpleNamespace(
list_entries=lambda sort_by="index", filter_kind=None, include_archived=False: [
list_entries=lambda sort_by="index", filter_kinds=None, include_archived=False: [
(1, "Label", "user", "url", False)
],
search_entries=lambda q, kinds=None: [
@@ -25,7 +25,7 @@ class DummyPM:
retrieve_entry=lambda idx: {"type": EntryType.PASSWORD.value, "length": 8},
get_totp_code=lambda idx, seed: "123456",
add_entry=lambda label, length, username, url, **kwargs: 1,
add_totp=lambda label, seed, index=None, secret=None, period=30, digits=6: "totp://",
add_totp=lambda label, seed, index=None, secret=None, period=30, digits=6, deterministic=False: "totp://",
add_ssh_key=lambda label, seed, index=None, notes="": 2,
add_pgp_key=lambda label, seed, index=None, key_type="ed25519", user_id="", notes="": 3,
add_nostr_key=lambda label, seed, index=None, notes="": 4,
@@ -42,7 +42,7 @@ class DummyPM:
)
self.parent_seed = "seed"
self.handle_display_totp_codes = lambda: None
self.handle_export_database = lambda path: None
self.handle_export_database = lambda path, **kwargs: None
self.handle_import_database = lambda path: None
self.change_password = lambda *a, **kw: None
self.lock_vault = lambda: None

@@ -65,8 +65,14 @@ runner = CliRunner()
"--digits",
"7",
],
("Label", "seed"),
{"index": 1, "secret": "abc", "period": 45, "digits": 7},
("Label", None),
{
"index": 1,
"secret": "abc",
"period": 45,
"digits": 7,
"deterministic": False,
},
"otpauth://uri",
),
(
@@ -17,8 +17,8 @@ def _setup_pm(tmp_path: Path):
cfg = ConfigManager(vault, tmp_path)
backup = BackupManager(tmp_path, cfg)
pm = SimpleNamespace(
handle_export_database=lambda p: export_backup(
vault, backup, p, parent_seed=TEST_SEED
handle_export_database=lambda p, encrypt=True: export_backup(
vault, backup, p, parent_seed=TEST_SEED, encrypt=encrypt
),
handle_import_database=lambda p: import_backup(
vault, backup, p, parent_seed=TEST_SEED
@@ -91,3 +91,36 @@ def test_cli_import_round_trip(monkeypatch, tmp_path):
rc = main.main(["import", "--file", str(export_path)])
assert rc == 0
assert vault.load_index() == original


def test_cli_export_import_unencrypted(monkeypatch, tmp_path):
pm, vault = _setup_pm(tmp_path)
data = {
"schema_version": 4,
"entries": {
"0": {
"label": "example",
"type": "password",
"notes": "",
"custom_fields": [],
"origin": "",
"tags": [],
}
},
}
vault.save_index(data)

monkeypatch.setattr(main, "PasswordManager", lambda *a, **k: pm)
monkeypatch.setattr(main, "configure_logging", lambda: None)
monkeypatch.setattr(main, "initialize_app", lambda: None)
monkeypatch.setattr(main.signal, "signal", lambda *a, **k: None)

export_path = tmp_path / "out.json"
rc = main.main(["export", "--file", str(export_path), "--unencrypted"])
assert rc == 0
assert export_path.exists()

vault.save_index({"schema_version": 4, "entries": {}})
rc = main.main(["import", "--file", str(export_path)])
assert rc == 0
assert vault.load_index() == data
@@ -21,8 +21,8 @@ class DummyEnc:


class DummyBIP85:
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
return bytes((index + i) % 256 for i in range(bytes_len))
def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
return bytes((index + i) % 256 for i in range(entropy_bytes))


def make_manager(tmp_path: Path) -> PasswordManager:
@@ -3,11 +3,15 @@ from pathlib import Path

from hypothesis import given, strategies as st, settings, HealthCheck
from mnemonic import Mnemonic
import hashlib
import base64
import os

from utils.key_derivation import (
derive_key_from_password,
derive_key_from_password_argon2,
derive_index_key,
KdfConfig,
)
from utils.fingerprint import generate_fingerprint
from seedpass.core.encryption import EncryptionManager
@@ -36,16 +40,27 @@ def test_fuzz_key_round_trip(password, seed_bytes, config, mode, tmp_path: Path)
seed_phrase = Mnemonic("english").to_mnemonic(seed_bytes)
fp = generate_fingerprint(seed_phrase)
if mode == "argon2":
key = derive_key_from_password_argon2(
password, fp, time_cost=1, memory_cost=8, parallelism=1
cfg = KdfConfig(
params={"time_cost": 1, "memory_cost": 8, "parallelism": 1},
salt_b64=base64.b64encode(
hashlib.sha256(fp.encode()).digest()[:16]
).decode(),
)
key = derive_key_from_password_argon2(password, cfg)
else:
key = derive_key_from_password(password, fp, iterations=1)
cfg = KdfConfig(
name="pbkdf2",
params={"iterations": 1},
salt_b64=base64.b64encode(
hashlib.sha256(fp.encode()).digest()[:16]
).decode(),
)

enc_mgr = EncryptionManager(key, tmp_path)

# Parent seed round trip
enc_mgr.encrypt_parent_seed(seed_phrase)
enc_mgr.encrypt_parent_seed(seed_phrase, kdf=cfg)
assert enc_mgr.decrypt_parent_seed() == seed_phrase

# JSON data round trip
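For context, a hedged sketch of deriving a key from a password with Argon2id using the same shape of inputs as the KdfConfig in the hunk above (time/memory/parallelism params plus a base64 salt taken from the profile fingerprint). This uses argon2-cffi's low-level API directly and is not the SeedPass implementation; the parameter values are only the test-sized ones from the diff.

```python
# Illustrative Argon2id derivation from password + fingerprint-derived salt.
import base64
import hashlib

from argon2.low_level import Type, hash_secret_raw

password = "correct horse battery staple"
fingerprint = "fp-example"
params = {"time_cost": 1, "memory_cost": 8, "parallelism": 1}

salt = hashlib.sha256(fingerprint.encode()).digest()[:16]
salt_b64 = base64.b64encode(salt).decode()   # the value a KdfConfig would persist

key = hash_secret_raw(
    secret=password.encode(),
    salt=base64.b64decode(salt_b64),
    time_cost=params["time_cost"],
    memory_cost=params["memory_cost"],
    parallelism=params["parallelism"],
    hash_len=32,
    type=Type.ID,
)
assert len(key) == 32
```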
@@ -30,8 +30,8 @@ class DummyEntries:
self.data = [(1, "Example", None, None, False)]
self.code = "111111"

def list_entries(self, sort_by="index", filter_kind=None, include_archived=False):
if filter_kind:
def list_entries(self, sort_by="index", filter_kinds=None, include_archived=False):
if filter_kinds:
return [(idx, label, None, None, False) for idx, label, *_ in self.data]
return self.data

@@ -33,7 +33,9 @@ class FakeEntries:
self.added.append(("password", label, length, username, url))
return 1

def add_totp(self, label):
def add_totp(
self, label, deterministic=False, index=None, secret=None, period=30, digits=6
):
self.added.append(("totp", label))
return 1

@@ -9,7 +9,7 @@ from seedpass_gui.app import MainWindow


class DummyEntries:
def list_entries(self, sort_by="index", filter_kind=None, include_archived=False):
def list_entries(self, sort_by="index", filter_kinds=None, include_archived=False):
return []

def search_entries(self, q):
@@ -1,4 +1,6 @@
import bcrypt
import hashlib
import base64
from pathlib import Path
from tempfile import TemporaryDirectory
from types import SimpleNamespace
@@ -7,6 +9,7 @@ from utils.key_derivation import (
derive_key_from_password,
derive_key_from_password_argon2,
derive_index_key,
KdfConfig,
)
from seedpass.core.encryption import EncryptionManager
from seedpass.core.vault import Vault
@@ -21,10 +24,24 @@ def _setup_profile(tmp: Path, mode: str):
argon_kwargs = dict(time_cost=1, memory_cost=8, parallelism=1)
fp = tmp.name
if mode == "argon2":
seed_key = derive_key_from_password_argon2(TEST_PASSWORD, fp, **argon_kwargs)
cfg = KdfConfig(
params=argon_kwargs,
salt_b64=base64.b64encode(
hashlib.sha256(fp.encode()).digest()[:16]
).decode(),
)
seed_key = derive_key_from_password_argon2(TEST_PASSWORD, cfg)
EncryptionManager(seed_key, tmp).encrypt_parent_seed(TEST_SEED, kdf=cfg)
else:
seed_key = derive_key_from_password(TEST_PASSWORD, fp, iterations=1)
EncryptionManager(seed_key, tmp).encrypt_parent_seed(TEST_SEED)
cfg = KdfConfig(
name="pbkdf2",
params={"iterations": 1},
salt_b64=base64.b64encode(
hashlib.sha256(fp.encode()).digest()[:16]
).decode(),
)
EncryptionManager(seed_key, tmp).encrypt_parent_seed(TEST_SEED, kdf=cfg)

index_key = derive_index_key(TEST_SEED)
enc_mgr = EncryptionManager(index_key, tmp)
@@ -65,9 +82,9 @@ def test_setup_encryption_manager_kdf_modes(monkeypatch):
)
if mode == "argon2":
monkeypatch.setattr(
"seedpass.core.manager.derive_key_from_password_argon2",
lambda pw, fp: derive_key_from_password_argon2(
pw, fp, **argon_kwargs
"seedpass.core.manager.KdfConfig",
lambda salt_b64, **_: KdfConfig(
params=argon_kwargs, salt_b64=salt_b64
),
)
monkeypatch.setattr(PasswordManager, "initialize_bip85", lambda self: None)
@@ -76,3 +93,26 @@ def test_setup_encryption_manager_kdf_modes(monkeypatch):
)
assert pm.setup_encryption_manager(path, exit_on_fail=False)
assert pm.parent_seed == TEST_SEED


def test_kdf_param_round_trip(tmp_path):
cfg = KdfConfig(
params={"time_cost": 3, "memory_cost": 32, "parallelism": 1},
salt_b64=base64.b64encode(b"static-salt-1234").decode(),
)
key = derive_key_from_password_argon2(TEST_PASSWORD, cfg)
mgr = EncryptionManager(key, tmp_path)
mgr.encrypt_parent_seed(TEST_SEED, kdf=cfg)
stored = mgr.get_file_kdf(Path("parent_seed.enc"))
assert stored.params == cfg.params


def test_vault_kdf_migration(tmp_path):
index_key = derive_index_key(TEST_SEED)
mgr = EncryptionManager(index_key, tmp_path)
vault = Vault(mgr, tmp_path)
old_kdf = KdfConfig(name="hkdf", version=0, params={}, salt_b64="")
mgr.save_json_data({"entries": {}}, vault.index_file, kdf=old_kdf)
vault.load_index()
new_kdf = mgr.get_file_kdf(vault.index_file)
assert new_kdf.version == KdfConfig().version
src/tests/test_kdf_strength_slider.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from pathlib import Path
from tempfile import TemporaryDirectory
from types import SimpleNamespace

from helpers import create_vault, TEST_SEED, TEST_PASSWORD
from seedpass.core.config_manager import ConfigManager
from main import handle_set_kdf_iterations


def test_kdf_strength_slider_persists(monkeypatch):
with TemporaryDirectory() as tmpdir:
tmp_path = Path(tmpdir)
vault, enc_mgr = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
cfg_mgr = ConfigManager(vault, tmp_path)
pm = SimpleNamespace(config_manager=cfg_mgr)
inputs = iter(["3"])
monkeypatch.setattr("builtins.input", lambda *_: next(inputs))
handle_set_kdf_iterations(pm)
assert cfg_mgr.get_kdf_iterations() == 100_000
@@ -1,11 +1,15 @@
import logging
import pytest
import logging
import hashlib
import base64
from utils.fingerprint import generate_fingerprint
from utils.key_derivation import (
derive_key_from_password,
derive_key_from_password_argon2,
derive_index_key_seed_only,
derive_index_key,
KdfConfig,
)


@@ -48,15 +52,17 @@ def test_argon2_fingerprint_affects_key():
fp1 = generate_fingerprint("seed one")
fp2 = generate_fingerprint("seed two")

k1 = derive_key_from_password_argon2(
password, fp1, time_cost=1, memory_cost=8, parallelism=1
cfg1 = KdfConfig(
params={"time_cost": 1, "memory_cost": 8, "parallelism": 1},
salt_b64=base64.b64encode(hashlib.sha256(fp1.encode()).digest()[:16]).decode(),
)
k2 = derive_key_from_password_argon2(
password, fp1, time_cost=1, memory_cost=8, parallelism=1
)
k3 = derive_key_from_password_argon2(
password, fp2, time_cost=1, memory_cost=8, parallelism=1
cfg2 = KdfConfig(
params={"time_cost": 1, "memory_cost": 8, "parallelism": 1},
salt_b64=base64.b64encode(hashlib.sha256(fp2.encode()).digest()[:16]).decode(),
)
k1 = derive_key_from_password_argon2(password, cfg1)
k2 = derive_key_from_password_argon2(password, cfg1)
k3 = derive_key_from_password_argon2(password, cfg2)

assert k1 == k2
assert k1 != k3
src/tests/test_key_hierarchy.py (new file, 19 lines)
@@ -0,0 +1,19 @@
import base64
from bip_utils import Bip39SeedGenerator
from utils.key_hierarchy import kd
from utils.key_derivation import derive_index_key


def test_kd_distinct_infos():
root = b"root" * 8
k1 = kd(root, b"info1")
k2 = kd(root, b"info2")
assert k1 != k2


def test_derive_index_key_matches_hierarchy():
seed = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
seed_bytes = Bip39SeedGenerator(seed).Generate()
master = kd(seed_bytes, b"seedpass:v1:master")
expected = base64.urlsafe_b64encode(kd(master, b"seedpass:v1:storage"))
assert derive_index_key(seed) == expected
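For context, a hedged sketch of a `kd(root, info)`-style hierarchy using HKDF-SHA256 with distinct "info" labels, which is the behavior the test above exercises. Whether SeedPass's `kd()` is exactly HKDF-SHA256 with these parameters is an assumption made for illustration.

```python
# Illustrative key hierarchy: same root, different info labels -> independent keys.
import base64

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF


def kd(root: bytes, info: bytes, length: int = 32) -> bytes:
    # A fresh HKDF instance per derivation; salt is omitted for simplicity.
    return HKDF(algorithm=hashes.SHA256(), length=length, salt=None, info=info).derive(root)


seed_bytes = b"\x01" * 64                        # stand-in for the BIP39 seed bytes
master = kd(seed_bytes, b"seedpass:v1:master")
storage = kd(master, b"seedpass:v1:storage")

assert master != storage                          # distinct info gives distinct keys
index_key = base64.urlsafe_b64encode(storage)     # URL-safe encoding, as in the test
```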
@@ -37,10 +37,30 @@ def test_add_and_modify_key_value():
|
||||
"tags": [],
|
||||
}
|
||||
|
||||
# Appears in listing
|
||||
assert em.list_entries() == [(idx, "API entry", None, None, False)]
|
||||
|
||||
# Modify key and value
|
||||
em.modify_entry(idx, key="api_key2", value="def456")
|
||||
updated = em.retrieve_entry(idx)
|
||||
assert updated["key"] == "api_key2"
|
||||
assert updated["value"] == "def456"
|
||||
|
||||
# Archive and ensure it disappears from the default listing
|
||||
em.archive_entry(idx)
|
||||
archived = em.retrieve_entry(idx)
|
||||
assert archived["archived"] is True
|
||||
assert em.list_entries() == []
|
||||
assert em.list_entries(include_archived=True) == [
|
||||
(idx, "API entry", None, None, True)
|
||||
]
|
||||
|
||||
# Restore and ensure it reappears
|
||||
em.restore_entry(idx)
|
||||
restored = em.retrieve_entry(idx)
|
||||
assert restored["archived"] is False
|
||||
assert em.list_entries() == [(idx, "API entry", None, None, False)]
|
||||
|
||||
# Values are not searchable
|
||||
results = em.search_entries("def456")
|
||||
assert results == []
|
||||
|
@@ -1,4 +1,5 @@
|
||||
import json
|
||||
import base64
|
||||
import hashlib
|
||||
from pathlib import Path
|
||||
|
||||
@@ -82,7 +83,7 @@ def test_failed_migration_restores_legacy(monkeypatch, tmp_path: Path):
|
||||
assert not vault.migrated_from_legacy
|
||||
|
||||
|
||||
def test_migrated_index_has_v2_prefix(monkeypatch, tmp_path: Path):
|
||||
def test_migrated_index_has_v3_prefix(monkeypatch, tmp_path: Path):
|
||||
vault, _ = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
|
||||
|
||||
key = derive_index_key(TEST_SEED)
|
||||
@@ -99,7 +100,8 @@ def test_migrated_index_has_v2_prefix(monkeypatch, tmp_path: Path):
|
||||
vault.load_index()
|
||||
|
||||
new_file = tmp_path / "seedpass_entries_db.json.enc"
|
||||
assert new_file.read_bytes().startswith(b"V2:")
|
||||
payload = json.loads(new_file.read_text())
|
||||
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||
assert vault.migrated_from_legacy
|
||||
|
||||
|
||||
@@ -154,6 +156,14 @@ def test_migration_syncs_when_confirmed(monkeypatch, tmp_path: Path):
|
||||
pm.fingerprint_dir = tmp_path
|
||||
pm.current_fingerprint = tmp_path.name
|
||||
pm.bip85 = SimpleNamespace()
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
|
||||
cfg_mgr = ConfigManager(pm.vault, tmp_path)
|
||||
cfg = cfg_mgr.load_config(require_pin=False)
|
||||
cfg["offline_mode"] = False
|
||||
cfg_mgr.save_config(cfg)
|
||||
pm.config_manager = cfg_mgr
|
||||
pm.offline_mode = False
|
||||
|
||||
calls = {"sync": 0}
|
||||
pm.sync_vault = lambda *a, **k: calls.__setitem__("sync", calls["sync"] + 1) or {
|
||||
@@ -277,6 +287,7 @@ def test_legacy_index_reinit_syncs_once_when_confirmed(monkeypatch, tmp_path: Pa
|
||||
pm.fingerprint_dir = tmp_path
|
||||
pm.current_fingerprint = tmp_path.name
|
||||
pm.bip85 = SimpleNamespace()
|
||||
pm.offline_mode = True
|
||||
|
||||
monkeypatch.setattr(
|
||||
"seedpass.core.manager.NostrClient", lambda *a, **k: SimpleNamespace()
|
||||
@@ -294,7 +305,7 @@ def test_legacy_index_reinit_syncs_once_when_confirmed(monkeypatch, tmp_path: Pa
|
||||
pm.initialize_managers()
|
||||
pm.initialize_managers()
|
||||
|
||||
assert calls["sync"] == 1
|
||||
assert calls["sync"] == 0
|
||||
assert enc_mgr.last_migration_performed is False
|
||||
|
||||
|
||||
@@ -314,6 +325,13 @@ def test_schema_migration_no_sync_prompt(monkeypatch, tmp_path: Path):
|
||||
pm.fingerprint_dir = tmp_path
|
||||
pm.current_fingerprint = tmp_path.name
|
||||
pm.bip85 = SimpleNamespace()
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
|
||||
cfg_mgr = ConfigManager(pm.vault, tmp_path)
|
||||
cfg = cfg_mgr.load_config(require_pin=False)
|
||||
cfg["offline_mode"] = False
|
||||
cfg_mgr.save_config(cfg)
|
||||
pm.config_manager = cfg_mgr
|
||||
pm.offline_mode = False
|
||||
|
||||
calls = {"sync": 0, "confirm": 0}
|
||||
|
@@ -66,5 +66,5 @@ def test_migrate_iterations(tmp_path, monkeypatch, iterations):
|
||||
cfg = ConfigManager(vault, tmp_path)
|
||||
assert cfg.get_kdf_iterations() == iterations
|
||||
|
||||
content = (tmp_path / "seedpass_entries_db.json.enc").read_bytes()
|
||||
assert content.startswith(b"V2:")
|
||||
payload = json.loads((tmp_path / "seedpass_entries_db.json.enc").read_text())
|
||||
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||
|
@@ -50,6 +50,6 @@ def test_migrate_legacy_sets_flag(tmp_path, monkeypatch):
|
||||
monkeypatch.setattr(vault_module, "prompt_existing_password", lambda _: password)
|
||||
monkeypatch.setattr("builtins.input", lambda _: "2")
|
||||
vault.load_index()
|
||||
content = (tmp_path / "seedpass_entries_db.json.enc").read_bytes()
|
||||
assert content.startswith(b"V2:")
|
||||
payload = json.loads((tmp_path / "seedpass_entries_db.json.enc").read_text())
|
||||
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||
assert vault.encryption_manager.last_migration_performed is True
|
||||
|
@@ -1,4 +1,5 @@
|
||||
import json
|
||||
import base64
|
||||
import hashlib
|
||||
from pathlib import Path
|
||||
from types import SimpleNamespace
|
||||
@@ -34,7 +35,8 @@ def test_legacy_migration_second_session(monkeypatch, tmp_path: Path) -> None:
|
||||
monkeypatch.setattr("builtins.input", lambda *_a, **_k: "y")
|
||||
vault.load_index()
|
||||
new_file = fp_dir / "seedpass_entries_db.json.enc"
|
||||
assert new_file.read_bytes().startswith(b"V2:")
|
||||
payload = json.loads(new_file.read_text())
|
||||
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||
|
||||
new_enc_mgr = EncryptionManager(key, fp_dir)
|
||||
new_vault = Vault(new_enc_mgr, fp_dir)
|
||||
@@ -59,4 +61,5 @@ def test_legacy_migration_second_session(monkeypatch, tmp_path: Path) -> None:
|
||||
)
|
||||
|
||||
pm.initialize_managers()
|
||||
assert new_file.read_bytes().startswith(b"V2:")
|
||||
payload = json.loads(new_file.read_text())
|
||||
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||
|
src/tests/test_list_entries_all_types.py (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
from types import SimpleNamespace
|
||||
|
||||
from typer.testing import CliRunner
|
||||
|
||||
from seedpass.cli import app as cli_app
|
||||
from seedpass.cli import entry as entry_cli
|
||||
from helpers import create_vault, TEST_SEED, TEST_PASSWORD
|
||||
from seedpass.core.backup import BackupManager
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
from seedpass.core.entry_management import EntryManager
|
||||
from seedpass.core.manager import PasswordManager, EncryptionMode
|
||||
|
||||
|
||||
def _setup_manager(tmp_path: Path) -> tuple[PasswordManager, EntryManager]:
|
||||
vault, enc_mgr = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
|
||||
cfg_mgr = ConfigManager(vault, tmp_path)
|
||||
backup_mgr = BackupManager(tmp_path, cfg_mgr)
|
||||
entry_mgr = EntryManager(vault, backup_mgr)
|
||||
|
||||
pm = PasswordManager.__new__(PasswordManager)
|
||||
pm.encryption_mode = EncryptionMode.SEED_ONLY
|
||||
pm.encryption_manager = enc_mgr
|
||||
pm.vault = vault
|
||||
pm.entry_manager = entry_mgr
|
||||
pm.backup_manager = backup_mgr
|
||||
pm.parent_seed = TEST_SEED
|
||||
pm.nostr_client = SimpleNamespace()
|
||||
pm.fingerprint_dir = tmp_path
|
||||
pm.secret_mode_enabled = False
|
||||
return pm, entry_mgr
|
||||
|
||||
|
||||
def _create_all_entries(em: EntryManager) -> None:
|
||||
em.add_entry("pw", 8)
|
||||
em.add_totp("totp", TEST_SEED)
|
||||
em.add_ssh_key("ssh", TEST_SEED)
|
||||
em.add_seed("seed", TEST_SEED, words_num=12)
|
||||
em.add_nostr_key("nostr", TEST_SEED)
|
||||
em.add_pgp_key("pgp", TEST_SEED)
|
||||
em.add_key_value("kv", "k", "v")
|
||||
em.add_managed_account("acct", TEST_SEED)
|
||||
|
||||
|
||||
def test_cli_list_all_types(monkeypatch):
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
tmp_path = Path(tmpdir)
|
||||
pm, em = _setup_manager(tmp_path)
|
||||
_create_all_entries(em)
|
||||
|
||||
def fake_get_entry_service(_ctx):
|
||||
return SimpleNamespace(
|
||||
list_entries=lambda sort_by, filter_kinds, include_archived: pm.entry_manager.list_entries(
|
||||
sort_by=sort_by,
|
||||
filter_kinds=filter_kinds,
|
||||
include_archived=include_archived,
|
||||
)
|
||||
)
|
||||
|
||||
monkeypatch.setattr(entry_cli, "_get_entry_service", fake_get_entry_service)
|
||||
|
||||
runner = CliRunner()
|
||||
result = runner.invoke(cli_app, ["entry", "list"])
|
||||
assert result.exit_code == 0
|
||||
out = result.stdout
|
||||
for label in ["pw", "totp", "ssh", "seed", "nostr", "pgp", "kv", "acct"]:
|
||||
assert label in out
|
||||
|
||||
|
||||
def test_menu_list_all_types(monkeypatch, capsys):
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
tmp_path = Path(tmpdir)
|
||||
pm, em = _setup_manager(tmp_path)
|
||||
_create_all_entries(em)
|
||||
|
||||
inputs = iter(["1", "", ""]) # choose All then exit
|
||||
monkeypatch.setattr("builtins.input", lambda *_: next(inputs))
|
||||
|
||||
pm.handle_list_entries()
|
||||
out = capsys.readouterr().out
|
||||
for label in ["pw", "totp", "ssh", "seed", "nostr", "pgp", "kv", "acct"]:
|
||||
assert label in out
|
@@ -57,5 +57,5 @@ def test_filter_by_type():
|
||||
em = setup_entry_manager(tmp_path)
|
||||
em.add_entry("site", 8, "user")
|
||||
em.add_totp("Example", TEST_SEED)
|
||||
result = em.list_entries(filter_kind=EntryType.TOTP.value)
|
||||
result = em.list_entries(filter_kinds=[EntryType.TOTP.value])
|
||||
assert result == [(1, "Example", None, None, False)]
|
||||
|
@@ -41,6 +41,9 @@ def test_add_and_get_managed_account_seed():
|
||||
assert fp
|
||||
assert (tmp_path / "accounts" / fp).exists()
|
||||
|
||||
# Appears in listing
|
||||
assert mgr.list_entries() == [(idx, "acct", None, None, False)]
|
||||
|
||||
phrase_a = mgr.get_managed_account_seed(idx, TEST_SEED)
|
||||
phrase_b = mgr.get_managed_account_seed(idx, TEST_SEED)
|
||||
assert phrase_a == phrase_b
|
||||
@@ -51,6 +54,23 @@ def test_add_and_get_managed_account_seed():
|
||||
assert phrase_a == expected
|
||||
assert generate_fingerprint(phrase_a) == fp
|
||||
|
||||
# Archive and ensure it disappears from default listing
|
||||
mgr.archive_entry(idx)
|
||||
archived = mgr.retrieve_entry(idx)
|
||||
assert archived["archived"] is True
|
||||
assert mgr.list_entries() == []
|
||||
assert mgr.list_entries(include_archived=True) == [
|
||||
(idx, "acct", None, None, True)
|
||||
]
|
||||
|
||||
# Restore and ensure deterministic derivation is unchanged
|
||||
mgr.restore_entry(idx)
|
||||
restored = mgr.retrieve_entry(idx)
|
||||
assert restored["archived"] is False
|
||||
assert mgr.list_entries() == [(idx, "acct", None, None, False)]
|
||||
phrase_c = mgr.get_managed_account_seed(idx, TEST_SEED)
|
||||
assert phrase_c == expected
|
||||
|
||||
|
||||
def test_load_and_exit_managed_account(monkeypatch):
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
|
@@ -60,15 +60,11 @@ def test_handle_add_totp(monkeypatch, capsys):
|
||||
out = capsys.readouterr().out
|
||||
|
||||
entry = entry_mgr.retrieve_entry(0)
|
||||
assert entry == {
|
||||
"type": "totp",
|
||||
"kind": "totp",
|
||||
"label": "Example",
|
||||
"index": 0,
|
||||
"period": 30,
|
||||
"digits": 6,
|
||||
"archived": False,
|
||||
"notes": "",
|
||||
"tags": [],
|
||||
}
|
||||
assert entry["type"] == "totp"
|
||||
assert entry["kind"] == "totp"
|
||||
assert entry["label"] == "Example"
|
||||
assert entry["deterministic"] is False
|
||||
assert "index" not in entry
|
||||
assert "secret" in entry
|
||||
assert len(entry["secret"]) >= 16
|
||||
assert "ID 0" in out
|
||||
|
@@ -32,7 +32,7 @@ def test_handle_display_totp_codes(monkeypatch, capsys, password_manager):
|
||||
|
||||
pm.handle_display_totp_codes()
|
||||
out = capsys.readouterr().out
|
||||
assert "Generated 2FA Codes" in out
|
||||
assert "Imported 2FA Codes" in out
|
||||
assert "[0] Example" in out
|
||||
assert "123456" in out
|
||||
|
||||
|
src/tests/test_manifest_id_privacy.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
import asyncio
|
||||
|
||||
from helpers import dummy_nostr_client
|
||||
|
||||
|
||||
def test_published_events_no_fingerprint(dummy_nostr_client):
|
||||
client, relay = dummy_nostr_client
|
||||
asyncio.run(client.publish_snapshot(b"secret"))
|
||||
fingerprint = "fp"
|
||||
events = list(relay.manifests) + list(relay.chunks.values())
|
||||
seen = set()
|
||||
for ev in events:
|
||||
if id(ev) in seen:
|
||||
continue
|
||||
seen.add(id(ev))
|
||||
assert fingerprint not in ev.id
|
||||
for tag in getattr(ev, "tags", []):
|
||||
assert fingerprint not in tag
|
@@ -5,6 +5,7 @@ from tempfile import TemporaryDirectory
|
||||
from seedpass.core.manager import PasswordManager
|
||||
from utils.fingerprint_manager import FingerprintManager
|
||||
from utils.fingerprint import generate_fingerprint
|
||||
from seedpass.core.state_manager import StateManager
|
||||
|
||||
VALID_SEED = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
|
||||
|
||||
@@ -13,6 +14,7 @@ def setup_pm(tmp_path, monkeypatch):
|
||||
pm = PasswordManager.__new__(PasswordManager)
|
||||
pm.fingerprint_manager = FingerprintManager(tmp_path)
|
||||
pm.config_manager = type("Cfg", (), {"get_kdf_iterations": lambda self: 1})()
|
||||
pm.state_manager = StateManager(tmp_path)
|
||||
monkeypatch.setattr("seedpass.core.manager.prompt_for_password", lambda: "pw")
|
||||
monkeypatch.setattr("seedpass.core.manager.derive_index_key", lambda seed: b"idx")
|
||||
monkeypatch.setattr(
|
||||
@@ -49,3 +51,5 @@ def test_generate_new_seed_creates_profile(monkeypatch):
|
||||
|
||||
assert fingerprint == generate_fingerprint(VALID_SEED)
|
||||
assert pm.fingerprint_manager.list_fingerprints() == [fingerprint]
|
||||
sm = StateManager(tmp_path / fingerprint)
|
||||
assert sm.state["nostr_account_idx"] == 1
|
||||
|
src/tests/test_nonce_uniqueness.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from pathlib import Path

from helpers import TEST_SEED
from utils.key_derivation import derive_index_key
from seedpass.core.encryption import EncryptionManager


def test_nonce_uniqueness(tmp_path: Path) -> None:
key = derive_index_key(TEST_SEED)
manager = EncryptionManager(key, tmp_path)
plaintext = b"repeat"
nonces = set()
for _ in range(10):
payload = manager.encrypt_data(plaintext)
assert payload.startswith(b"V3|")
nonce = payload[3:15]
assert nonce not in nonces
nonces.add(nonce)
assert len(nonces) == 10
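For context, a sketch of the property this test checks: every payload begins with `b"V3|"` followed by a fresh 12-byte nonce. AES-GCM is used below purely for illustration; the exact AEAD SeedPass places behind the "V3" framing is an assumption.

```python
# Illustrative encryption loop: a new 12-byte nonce per message, never reused per key.
import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
aead = AESGCM(key)

nonces = set()
for _ in range(10):
    nonce = os.urandom(12)                       # fresh nonce for every encryption
    payload = b"V3|" + nonce + aead.encrypt(nonce, b"repeat", None)
    assert payload[3:15] not in nonces           # same slice the test inspects
    nonces.add(payload[3:15])
assert len(nonces) == 10
```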
@@ -5,7 +5,6 @@ import json
|
||||
|
||||
from helpers import DummyEvent, DummyFilter, dummy_nostr_client
|
||||
from nostr.backup_models import KIND_MANIFEST, KIND_SNAPSHOT_CHUNK
|
||||
from nostr.client import MANIFEST_ID_PREFIX
|
||||
from nostr_sdk import Keys
|
||||
|
||||
|
||||
@@ -55,9 +54,7 @@ def test_fetch_snapshot_legacy_key_fallback(dummy_nostr_client, monkeypatch):
|
||||
],
|
||||
}
|
||||
)
|
||||
manifest_event = DummyEvent(
|
||||
KIND_MANIFEST, manifest_json, tags=[f"{MANIFEST_ID_PREFIX}fp"]
|
||||
)
|
||||
manifest_event = DummyEvent(KIND_MANIFEST, manifest_json, tags=["legacy"])
|
||||
chunk_event = DummyEvent(
|
||||
KIND_SNAPSHOT_CHUNK,
|
||||
base64.b64encode(chunk_bytes).decode("utf-8"),
|
||||
@@ -69,9 +66,9 @@ def test_fetch_snapshot_legacy_key_fallback(dummy_nostr_client, monkeypatch):
|
||||
async def fake_fetch_events(f, _timeout):
|
||||
call["count"] += 1
|
||||
call["authors"].append(getattr(f, "author_pk", None))
|
||||
if call["count"] <= 2:
|
||||
if call["count"] == 1:
|
||||
return type("R", (), {"to_vec": lambda self: []})()
|
||||
elif call["count"] == 3:
|
||||
elif call["count"] == 2:
|
||||
return type("R", (), {"to_vec": lambda self: [manifest_event]})()
|
||||
else:
|
||||
return type("R", (), {"to_vec": lambda self: [chunk_event]})()
|
||||
|
@@ -1,49 +0,0 @@
|
||||
import asyncio
|
||||
|
||||
from helpers import TEST_SEED, dummy_nostr_client
|
||||
from nostr.backup_models import KIND_MANIFEST
|
||||
from nostr.client import MANIFEST_ID_PREFIX, NostrClient
|
||||
|
||||
|
||||
def test_fetch_latest_snapshot_legacy_identifier(dummy_nostr_client, monkeypatch):
|
||||
client, relay = dummy_nostr_client
|
||||
data = b"legacy"
|
||||
asyncio.run(client.publish_snapshot(data))
|
||||
relay.manifests[-1].tags = [MANIFEST_ID_PREFIX.rstrip("-")]
|
||||
relay.filters.clear()
|
||||
|
||||
orig_fetch = relay.fetch_events
|
||||
|
||||
async def fetch_events(self, f, timeout):
|
||||
identifier = f.ids[0] if getattr(f, "ids", None) else None
|
||||
kind = getattr(f, "kind_val", None)
|
||||
if kind == KIND_MANIFEST:
|
||||
events = [m for m in self.manifests if identifier in m.tags]
|
||||
self.filters.append(f)
|
||||
|
||||
class Res:
|
||||
def __init__(self, evs):
|
||||
self._evs = evs
|
||||
|
||||
def to_vec(self):
|
||||
return self._evs
|
||||
|
||||
return Res(events)
|
||||
return await orig_fetch(f, timeout)
|
||||
|
||||
monkeypatch.setattr(
|
||||
relay, "fetch_events", fetch_events.__get__(relay, relay.__class__)
|
||||
)
|
||||
|
||||
enc_mgr = client.encryption_manager
|
||||
monkeypatch.setattr(
|
||||
enc_mgr, "decrypt_parent_seed", lambda: TEST_SEED, raising=False
|
||||
)
|
||||
monkeypatch.setattr("nostr.client.KeyManager", type(client.key_manager))
|
||||
client2 = NostrClient(enc_mgr, "fp")
|
||||
relay.filters.clear()
|
||||
result = asyncio.run(client2.fetch_latest_snapshot())
|
||||
assert result is not None
|
||||
ids = [f.ids[0] for f in relay.filters]
|
||||
assert ids[0] == f"{MANIFEST_ID_PREFIX}fp"
|
||||
assert MANIFEST_ID_PREFIX.rstrip("-") in ids
|
src/tests/test_offline_mode_default_enabled.py (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
from helpers import create_vault, TEST_SEED, TEST_PASSWORD
|
||||
|
||||
|
||||
def test_offline_mode_default_enabled():
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
tmp_path = Path(tmpdir)
|
||||
vault, enc_mgr = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
|
||||
cfg_mgr = ConfigManager(vault, tmp_path)
|
||||
config = cfg_mgr.load_config(require_pin=False)
|
||||
assert config["offline_mode"] is True
|
@@ -35,6 +35,7 @@ def test_change_password_triggers_nostr_backup(monkeypatch):
|
||||
pm.parent_seed = TEST_SEED
|
||||
pm.store_hashed_password = lambda pw: None
|
||||
pm.verify_password = lambda pw: True
|
||||
pm.nostr_account_idx = 0
|
||||
|
||||
with patch("seedpass.core.manager.NostrClient") as MockClient:
|
||||
mock_instance = MockClient.return_value
|
||||
|
@@ -13,8 +13,8 @@ class DummyEnc:
|
||||
|
||||
|
||||
class DummyBIP85:
|
||||
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(bytes_len))
|
||||
def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(entropy_bytes))
|
||||
|
||||
|
||||
def make_generator(policy=None):
|
||||
|
@@ -8,8 +8,8 @@ class DummyEnc:
|
||||
|
||||
|
||||
class DummyBIP85:
|
||||
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(bytes_len))
|
||||
def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(entropy_bytes))
|
||||
|
||||
|
||||
def make_generator():
|
||||
|
@@ -14,8 +14,8 @@ class DummyEnc:
|
||||
|
||||
|
||||
class DummyBIP85:
|
||||
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(bytes_len))
|
||||
def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(entropy_bytes))
|
||||
|
||||
|
||||
def make_generator():
|
||||
|
@@ -15,8 +15,8 @@ class DummyEnc:
|
||||
|
||||
|
||||
class DummyBIP85:
|
||||
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(bytes_len))
|
||||
def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(entropy_bytes))
|
||||
|
||||
|
||||
def make_generator():
|
||||
|
@@ -12,8 +12,8 @@ class DummyEnc:
|
||||
|
||||
|
||||
class DummyBIP85:
|
||||
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(bytes_len))
|
||||
def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(entropy_bytes))
|
||||
|
||||
|
||||
def make_generator():
|
||||
|
@@ -15,8 +15,8 @@ class DummyEnc:
|
||||
|
||||
|
||||
class DummyBIP85:
|
||||
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(bytes_len))
|
||||
def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(entropy_bytes))
|
||||
|
||||
|
||||
def make_generator(policy=None):
|
||||
|
@@ -14,8 +14,8 @@ class DummyEnc:
|
||||
|
||||
|
||||
class DummyBIP85:
|
||||
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(bytes_len))
|
||||
def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(entropy_bytes))
|
||||
|
||||
|
||||
def make_generator(policy=None):
|
||||
|
@@ -62,6 +62,7 @@ def test_password_change_and_unlock(monkeypatch):
|
||||
pm.nostr_client = SimpleNamespace(
|
||||
publish_snapshot=lambda *a, **k: (None, "abcd")
|
||||
)
|
||||
pm.nostr_account_idx = 0
|
||||
|
||||
monkeypatch.setattr(
|
||||
"seedpass.core.manager.prompt_existing_password", lambda *_: old_pw
|
||||
|
@@ -15,6 +15,7 @@ from seedpass.core.vault import Vault
|
||||
from seedpass.core.backup import BackupManager
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
from seedpass.core.portable_backup import export_backup, import_backup
|
||||
from seedpass.core.portable_backup import PortableMode
|
||||
from utils.key_derivation import derive_index_key, derive_key_from_password
|
||||
from utils.fingerprint import generate_fingerprint
|
||||
|
||||
@@ -54,6 +55,22 @@ def test_round_trip(monkeypatch):
|
||||
assert vault.load_index()["pw"] == data["pw"]
|
||||
|
||||
|
||||
def test_round_trip_unencrypted(monkeypatch):
|
||||
with TemporaryDirectory() as td:
|
||||
tmp = Path(td)
|
||||
vault, backup, _ = setup_vault(tmp)
|
||||
data = {"pw": 1}
|
||||
vault.save_index(data)
|
||||
|
||||
path = export_backup(vault, backup, parent_seed=SEED, encrypt=False)
|
||||
wrapper = json.loads(path.read_text())
|
||||
assert wrapper["encryption_mode"] == PortableMode.NONE.value
|
||||
|
||||
vault.save_index({"pw": 0})
|
||||
import_backup(vault, backup, path, parent_seed=SEED)
|
||||
assert vault.load_index()["pw"] == data["pw"]
|
||||
|
||||
|
||||
from cryptography.fernet import InvalidToken
|
||||
|
||||
|
||||
|
@@ -20,6 +20,7 @@ def setup_pm(tmp_path):
|
||||
pm.encryption_mode = manager_module.EncryptionMode.SEED_ONLY
|
||||
pm.fingerprint_manager = manager_module.FingerprintManager(constants.APP_DIR)
|
||||
pm.current_fingerprint = None
|
||||
pm.state_manager = manager_module.StateManager(constants.APP_DIR)
|
||||
return pm, constants, manager_module
|
||||
|
||||
|
||||
@@ -41,8 +42,8 @@ def test_generate_seed_cleanup_on_failure(monkeypatch):
|
||||
|
||||
# fingerprint list should be empty and only fingerprints.json should remain
|
||||
assert pm.fingerprint_manager.list_fingerprints() == []
|
||||
contents = list(const.APP_DIR.iterdir())
|
||||
assert len(contents) == 1 and contents[0].name == "fingerprints.json"
|
||||
contents = sorted(p.name for p in const.APP_DIR.iterdir())
|
||||
assert contents == ["fingerprints.json", "seedpass_state.json"]
|
||||
fp_file = pm.fingerprint_manager.fingerprints_file
|
||||
with open(fp_file) as f:
|
||||
data = json.load(f)
|
||||
|
@@ -29,6 +29,7 @@ def test_add_and_switch_fingerprint(monkeypatch):
|
||||
pm.fingerprint_manager = fm
|
||||
pm.encryption_manager = object()
|
||||
pm.current_fingerprint = None
|
||||
pm.nostr_account_idx = 0
|
||||
|
||||
monkeypatch.setattr("builtins.input", lambda *_args, **_kwargs: "1")
|
||||
monkeypatch.setattr(
|
||||
|
@@ -82,9 +82,11 @@ def test_publish_snapshot_success():
|
||||
with patch.object(
|
||||
client.client, "send_event", side_effect=fake_send
|
||||
) as mock_send:
|
||||
manifest, event_id = asyncio.run(client.publish_snapshot(b"data"))
|
||||
with patch("nostr.snapshot.new_manifest_id", return_value=("id", b"nonce")):
|
||||
manifest, event_id = asyncio.run(client.publish_snapshot(b"data"))
|
||||
assert isinstance(manifest, Manifest)
|
||||
assert event_id == "seedpass-manifest-fp"
|
||||
assert event_id == "id"
|
||||
assert manifest.nonce == base64.b64encode(b"nonce").decode("utf-8")
|
||||
assert mock_send.await_count >= 1
|
||||
|
||||
|
||||
|
@@ -21,6 +21,7 @@ def setup_password_manager():
|
||||
pm.fingerprint_manager = manager_module.FingerprintManager(constants.APP_DIR)
|
||||
pm.current_fingerprint = None
|
||||
pm.save_and_encrypt_seed = lambda seed, fingerprint_dir: None
|
||||
pm.state_manager = manager_module.StateManager(constants.APP_DIR)
|
||||
return pm, constants
|
||||
|
||||
|
||||
|
@@ -1,4 +1,6 @@
|
||||
import sys
|
||||
import json
|
||||
import base64
|
||||
from pathlib import Path
|
||||
from cryptography.fernet import Fernet
|
||||
|
||||
@@ -28,4 +30,5 @@ def test_parent_seed_migrates_from_fernet(tmp_path: Path) -> None:
|
||||
|
||||
assert new_file.exists()
|
||||
assert new_file.read_bytes() != encrypted
|
||||
assert new_file.read_bytes().startswith(b"V2:")
|
||||
payload = json.loads(new_file.read_text())
|
||||
assert base64.b64decode(payload["ct"]).startswith(b"V3|")
|
||||
|
src/tests/test_seed_word_by_word_flow.py (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
import builtins
|
||||
from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
|
||||
import seedpass.core.manager as manager_module
|
||||
from helpers import TEST_SEED
|
||||
from utils import seed_prompt
|
||||
|
||||
|
||||
def test_prompt_seed_words_confirmation_loop(monkeypatch):
|
||||
phrase = TEST_SEED
|
||||
words = phrase.split()
|
||||
inputs = iter(words + [words[2]])
|
||||
confirmations = iter(["y", "y", "n", "y"] + ["y"] * (len(words) - 3))
|
||||
|
||||
monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(inputs))
|
||||
monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
|
||||
monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
|
||||
monkeypatch.setattr(builtins, "input", lambda *_: next(confirmations))
|
||||
|
||||
result = seed_prompt.prompt_seed_words(len(words))
|
||||
assert result == phrase
|
||||
|
||||
|
||||
def test_prompt_seed_words_invalid_word(monkeypatch):
|
||||
phrase = TEST_SEED
|
||||
words = phrase.split()
|
||||
inputs = iter(["invalid"] + words)
|
||||
confirmations = iter(["y"] * len(words))
|
||||
|
||||
monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(inputs))
|
||||
monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
|
||||
monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
|
||||
monkeypatch.setattr(builtins, "input", lambda *_: next(confirmations))
|
||||
|
||||
result = seed_prompt.prompt_seed_words(len(words))
|
||||
assert result == phrase
|
||||
|
||||
|
||||
def test_add_new_fingerprint_words_flow_success(monkeypatch):
|
||||
pm = manager_module.PasswordManager.__new__(manager_module.PasswordManager)
|
||||
pm.fingerprint_manager = SimpleNamespace(current_fingerprint=None)
|
||||
pm.initialize_managers = lambda: None
|
||||
|
||||
phrase = TEST_SEED
|
||||
words = phrase.split()
|
||||
word_iter = iter(words)
|
||||
inputs = iter(["2"] + ["y"] * len(words))
|
||||
|
||||
monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(word_iter))
|
||||
monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
|
||||
monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
|
||||
monkeypatch.setattr(builtins, "input", lambda *_: next(inputs))
|
||||
|
||||
captured = {}
|
||||
|
||||
def finalize(self, seed, password=None):
|
||||
captured["seed"] = seed
|
||||
self.parent_seed = seed
|
||||
return "fp"
|
||||
|
||||
monkeypatch.setattr(
|
||||
manager_module.PasswordManager, "_finalize_existing_seed", finalize
|
||||
)
|
||||
|
||||
result = pm.add_new_fingerprint()
|
||||
|
||||
assert result == "fp"
|
||||
assert pm.fingerprint_manager.current_fingerprint == "fp"
|
||||
assert captured["seed"] == phrase
|
||||
assert pm.parent_seed == phrase
|
||||
|
||||
|
||||
def test_add_new_fingerprint_words_flow_invalid_phrase(monkeypatch):
|
||||
pm = manager_module.PasswordManager.__new__(manager_module.PasswordManager)
|
||||
pm.fingerprint_manager = SimpleNamespace(current_fingerprint=None)
|
||||
pm.initialize_managers = lambda: None
|
||||
|
||||
words = ["abandon"] * 12
|
||||
word_iter = iter(words)
|
||||
inputs = iter(["2"] + ["y"] * len(words))
|
||||
|
||||
monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(word_iter))
|
||||
monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
|
||||
monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
|
||||
monkeypatch.setattr(builtins, "input", lambda *_: next(inputs))
|
||||
|
||||
with pytest.raises(SystemExit):
|
||||
pm.add_new_fingerprint()
|
||||
|
||||
assert pm.fingerprint_manager.current_fingerprint is None
|
||||
assert not hasattr(pm, "parent_seed")
|
src/tests/test_service_classes.py (new file, 135 lines)
@@ -0,0 +1,135 @@
from tempfile import TemporaryDirectory
from types import SimpleNamespace
from pathlib import Path

import pytest

from helpers import create_vault, TEST_SEED, TEST_PASSWORD, dummy_nostr_client
from seedpass.core.entry_management import EntryManager
from seedpass.core.backup import BackupManager
from seedpass.core.manager import PasswordManager, EncryptionMode
from seedpass.core.config_manager import ConfigManager
from seedpass.core.entry_service import EntryService
from seedpass.core.profile_service import ProfileService
from constants import DEFAULT_PASSWORD_LENGTH


class FakePasswordGenerator:
    def generate_password(self, length: int, index: int) -> str:
        return f"pw-{index}-{length}"


def _setup_pm(tmp_path: Path, client) -> PasswordManager:
    vault, enc_mgr = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
    cfg_mgr = ConfigManager(vault, tmp_path)
    backup_mgr = BackupManager(tmp_path, cfg_mgr)
    entry_mgr = EntryManager(vault, backup_mgr)

    pm = PasswordManager.__new__(PasswordManager)
    pm.encryption_mode = EncryptionMode.SEED_ONLY
    pm.encryption_manager = enc_mgr
    pm.vault = vault
    pm.entry_manager = entry_mgr
    pm.backup_manager = backup_mgr
    pm.password_generator = FakePasswordGenerator()
    pm.parent_seed = TEST_SEED
    pm.nostr_client = client
    pm.fingerprint_dir = tmp_path
    pm.secret_mode_enabled = False
    pm.is_dirty = False
    return pm


def test_entry_service_add_password(monkeypatch, dummy_nostr_client, capsys):
    client, _relay = dummy_nostr_client
    with TemporaryDirectory() as tmpdir:
        pm = _setup_pm(Path(tmpdir), client)
        service = EntryService(pm)
        inputs = iter(
            [
                "a",
                "Example",
                "",
                "",
                "",
                "",
                "n",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
                "",
            ]
        )
        monkeypatch.setattr("builtins.input", lambda *a, **k: next(inputs))
        monkeypatch.setattr("seedpass.core.entry_service.pause", lambda *a, **k: None)
        monkeypatch.setattr(pm, "start_background_vault_sync", lambda *a, **k: None)

        service.handle_add_password()
        out = capsys.readouterr().out
        entries = pm.entry_manager.list_entries(verbose=False)
        assert entries == [(0, "Example", "", "", False)]
        assert f"pw-0-{DEFAULT_PASSWORD_LENGTH}" in out


def test_menu_handler_list_entries(monkeypatch, capsys):
    with TemporaryDirectory() as tmpdir:
        pm = _setup_pm(Path(tmpdir), SimpleNamespace())
        pm.entry_manager.add_totp("Example", TEST_SEED)
        pm.entry_manager.add_entry("example.com", 12)
        pm.entry_manager.add_key_value("API entry", "api", "abc123")
        pm.entry_manager.add_managed_account("acct", TEST_SEED)
        inputs = iter(["1", ""])  # list all then exit
        monkeypatch.setattr("builtins.input", lambda *_: next(inputs))
        pm.menu_handler.handle_list_entries()
        out = capsys.readouterr().out
        assert "Example" in out
        assert "example.com" in out
        assert "API" in out
        assert "acct" in out


def test_profile_service_switch(monkeypatch):
    class DummyFingerprintManager:
        def __init__(self):
            self.fingerprints = ["fp1", "fp2"]
            self.current_fingerprint = "fp1"

        def list_fingerprints(self):
            return self.fingerprints

        def display_name(self, fp):
            return fp

        def get_current_fingerprint_dir(self):
            return Path(".")

    pm = PasswordManager.__new__(PasswordManager)
    pm.fingerprint_manager = DummyFingerprintManager()
    pm.current_fingerprint = "fp1"
    pm.setup_encryption_manager = lambda *a, **k: True
    pm.initialize_bip85 = lambda *a, **k: None
    pm.initialize_managers = lambda *a, **k: None
    pm.start_background_sync = lambda *a, **k: None
    pm.nostr_client = SimpleNamespace()
    pm.manifest_id = None
    pm.delta_since = None
    pm.encryption_manager = SimpleNamespace()
    pm.parent_seed = TEST_SEED
    pm.nostr_account_idx = 0

    service = ProfileService(pm)
    monkeypatch.setattr("builtins.input", lambda *_: "2")
    monkeypatch.setattr(
        "seedpass.core.profile_service.prompt_existing_password", lambda *_: "pw"
    )
    monkeypatch.setattr(
        "seedpass.core.manager.NostrClient", lambda *a, **k: SimpleNamespace()
    )

    assert service.handle_switch_fingerprint() is True
    assert pm.current_fingerprint == "fp2"
@@ -14,6 +14,7 @@ def test_state_manager_round_trip():
    assert state["last_sync_ts"] == 0
    assert state["manifest_id"] is None
    assert state["delta_since"] == 0
    assert state["nostr_account_idx"] == 0

    sm.add_relay("wss://example.com")
    sm.update_state(
@@ -30,6 +31,7 @@ def test_state_manager_round_trip():
    assert state2["last_sync_ts"] == 123
    assert state2["manifest_id"] == "mid"
    assert state2["delta_since"] == 111
    assert state2["nostr_account_idx"] == 0

    sm2.remove_relay(1)  # remove first default relay
    assert len(sm2.list_relays()) == len(DEFAULT_RELAYS)
@@ -28,23 +28,19 @@ def test_add_totp_and_get_code():
    assert uri.startswith("otpauth://totp/")

    entry = entry_mgr.retrieve_entry(0)
    assert entry == {
        "type": "totp",
        "kind": "totp",
        "label": "Example",
        "index": 0,
        "period": 30,
        "digits": 6,
        "archived": False,
        "notes": "",
        "tags": [],
    }
    assert entry["deterministic"] is False
    assert "secret" in entry

    code = entry_mgr.get_totp_code(0, TEST_SEED, timestamp=0)
    code = entry_mgr.get_totp_code(0, timestamp=0)

    expected = TotpManager.current_code(TEST_SEED, 0, timestamp=0)
    expected = pyotp.TOTP(entry["secret"]).at(0)
    assert code == expected

    # second entry should have different secret
    entry_mgr.add_totp("Other", TEST_SEED)
    entry2 = entry_mgr.retrieve_entry(1)
    assert entry["secret"] != entry2["secret"]


def test_totp_time_remaining(monkeypatch):
    with TemporaryDirectory() as tmpdir:
@@ -68,17 +64,8 @@ def test_add_totp_imported(tmp_path):
    secret = "JBSWY3DPEHPK3PXP"
    em.add_totp("Imported", TEST_SEED, secret=secret)
    entry = em.retrieve_entry(0)
    assert entry == {
        "type": "totp",
        "kind": "totp",
        "label": "Imported",
        "secret": secret,
        "period": 30,
        "digits": 6,
        "archived": False,
        "notes": "",
        "tags": [],
    }
    assert entry["secret"] == secret
    assert entry["deterministic"] is False
    code = em.get_totp_code(0, timestamp=0)
    assert code == pyotp.TOTP(secret).at(0)
@@ -92,3 +79,23 @@ def test_add_totp_with_notes(tmp_path):
    em.add_totp("NoteLabel", TEST_SEED, notes="some note")
    entry = em.retrieve_entry(0)
    assert entry["notes"] == "some note"


def test_legacy_deterministic_entry(tmp_path):
    vault, enc = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
    cfg_mgr = ConfigManager(vault, tmp_path)
    backup_mgr = BackupManager(tmp_path, cfg_mgr)
    em = EntryManager(vault, backup_mgr)

    em.add_totp("Legacy", TEST_SEED, deterministic=True)
    data = em._load_index()
    entry = data["entries"]["0"]
    entry.pop("deterministic", None)
    em._save_index(data)

    code = em.get_totp_code(0, TEST_SEED, timestamp=0)
    expected = TotpManager.current_code(TEST_SEED, 0, timestamp=0)
    assert code == expected

    exported = em.export_totp_entries(TEST_SEED)
    assert exported["entries"][0]["secret"] == TotpManager.derive_secret(TEST_SEED, 0)
@@ -18,8 +18,8 @@ runner = CliRunner()
def test_entry_list(monkeypatch):
    called = {}

    def list_entries(sort_by="index", filter_kind=None, include_archived=False):
        called["args"] = (sort_by, filter_kind, include_archived)
    def list_entries(sort_by="index", filter_kinds=None, include_archived=False):
        called["args"] = (sort_by, filter_kinds, include_archived)
        return [(0, "Site", "user", "", False)]

    pm = SimpleNamespace(
src/tests/test_vault_lock_flag.py (new file, 52 lines)
@@ -0,0 +1,52 @@
import pytest
from types import SimpleNamespace

from seedpass.core.manager import PasswordManager
from seedpass.errors import VaultLockedError


class DummyEntryManager:
    def __init__(self):
        self.cleared = False

    def clear_cache(self):
        self.cleared = True


def test_lock_vault_sets_flag_and_keeps_objects():
    pm = PasswordManager.__new__(PasswordManager)
    em = DummyEntryManager()
    pm.entry_manager = em
    pm.is_locked = False
    pm.locked = False
    pm.lock_vault()
    assert pm.is_locked
    assert pm.locked
    assert pm.entry_manager is em
    assert em.cleared


def test_entry_service_requires_unlocked():
    pm = PasswordManager.__new__(PasswordManager)
    service = SimpleNamespace()
    pm._entry_service = service
    pm.is_locked = True
    with pytest.raises(VaultLockedError):
        _ = pm.entry_service
    pm.is_locked = False
    assert pm.entry_service is service


def test_unlock_vault_clears_locked_flag(tmp_path):
    pm = PasswordManager.__new__(PasswordManager)
    pm.fingerprint_dir = tmp_path
    pm.parent_seed = "seed"
    pm.setup_encryption_manager = lambda *a, **k: None
    pm.initialize_bip85 = lambda: None
    pm.initialize_managers = lambda: None
    pm.update_activity = lambda: None
    pm.is_locked = True
    pm.locked = True
    pm.unlock_vault("pw")
    assert not pm.is_locked
    assert not pm.locked
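For context, here is a minimal sketch (not taken from the diff) of the guard pattern these tests exercise: a property that raises `VaultLockedError` while the vault is locked. The attribute names mirror the tests; the real `PasswordManager` implementation is not shown in this hunk.

```python
# Hypothetical sketch of the locked-vault guard exercised by the tests above.
from seedpass.errors import VaultLockedError


class GuardedManager:
    def __init__(self):
        self._entry_service = object()
        self.is_locked = True

    @property
    def entry_service(self):
        # Refuse access to services while the vault is locked.
        if self.is_locked:
            raise VaultLockedError("vault is locked")
        return self._entry_service
```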
@@ -3,15 +3,13 @@
"""
Key Derivation Module

Never ever ever use or suggest to use Random Salt. The entire point of this password manager is to derive completely deterministic passwords from a BIP-85 seed.
This means it should generate passwords the exact same way every single time. Salts would break this functionality and is not appropriate for this software's use case.
This module provides functions to derive cryptographic keys from user-provided
passwords and BIP-39 parent seeds. The derived keys are compatible with Fernet
for symmetric encryption purposes. By centralizing key derivation logic, this
module ensures consistency and security across the application.

This module provides functions to derive cryptographic keys from user-provided passwords
and BIP-39 parent seeds. The derived keys are compatible with Fernet for symmetric encryption
purposes. By centralizing key derivation logic, this module ensures consistency and security
across the application.

Ensure that all dependencies are installed and properly configured in your environment.
Ensure that all dependencies are installed and properly configured in your
environment.
"""

import os
@@ -21,9 +19,13 @@ import unicodedata
import logging
import hmac
import time
from dataclasses import dataclass, field
from enum import Enum
from typing import Optional, Union
from typing import Optional, Union, Dict, Any

from bip_utils import Bip39SeedGenerator
from local_bip85 import BIP85
from .key_hierarchy import kd

from cryptography.hazmat.primitives.kdf.hkdf import HKDF
from cryptography.hazmat.primitives import hashes
@@ -45,6 +47,27 @@ DEFAULT_ENCRYPTION_MODE = EncryptionMode.SEED_ONLY
TOTP_PURPOSE = 39


@dataclass
class KdfConfig:
    """Configuration block describing how a key was derived."""

    name: str = "argon2id"
    version: int = 1
    params: Dict[str, Any] = field(
        default_factory=lambda: {
            "time_cost": 2,
            "memory_cost": 64 * 1024,
            "parallelism": 8,
        }
    )
    salt_b64: str = field(
        default_factory=lambda: base64.b64encode(os.urandom(16)).decode()
    )


CURRENT_KDF_VERSION = 1


def derive_key_from_password(
    password: str, fingerprint: Union[str, bytes], iterations: int = 100_000
) -> bytes:
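As a usage sketch (not part of this diff), a `KdfConfig` can be created once, persisted alongside the vault, and passed back into `derive_key_from_password_argon2` to re-derive the same key later; the import path below is an assumption for illustration.

```python
# Hypothetical usage of KdfConfig with the Argon2id helper defined later in this file.
from seedpass.core.key_derivation import KdfConfig, derive_key_from_password_argon2

cfg = KdfConfig()  # random 16-byte salt, default Argon2id parameters
key_first = derive_key_from_password_argon2("correct horse battery staple", cfg)
key_again = derive_key_from_password_argon2("correct horse battery staple", cfg)
assert key_first == key_again  # same password + same KdfConfig -> same key

fresh = KdfConfig()  # a new config carries a new random salt
assert derive_key_from_password_argon2("correct horse battery staple", fresh) != key_first
```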
@@ -107,18 +130,15 @@ def derive_key_from_password(
    raise


def derive_key_from_password_argon2(
    password: str,
    fingerprint: Union[str, bytes],
    *,
    time_cost: int = 2,
    memory_cost: int = 64 * 1024,
    parallelism: int = 8,
) -> bytes:
def derive_key_from_password_argon2(password: str, kdf: KdfConfig) -> bytes:
    """Derive an encryption key from a password using Argon2id.

    The defaults follow recommended parameters but omit a salt for deterministic
    output. Smaller values may be supplied for testing.
    Parameters
    ----------
    password:
        The user's password.
    kdf:
        :class:`KdfConfig` instance describing salt and tuning parameters.
    """

    if not password:
@@ -129,17 +149,14 @@ def derive_key_from_password_argon2(
    try:
        from argon2.low_level import hash_secret_raw, Type

        if isinstance(fingerprint, bytes):
            salt = fingerprint
        else:
            salt = hashlib.sha256(fingerprint.encode()).digest()[:16]

        params = kdf.params or {}
        salt = base64.b64decode(kdf.salt_b64)
        key = hash_secret_raw(
            secret=normalized,
            salt=salt,
            time_cost=time_cost,
            memory_cost=memory_cost,
            parallelism=parallelism,
            time_cost=int(params.get("time_cost", 2)),
            memory_cost=int(params.get("memory_cost", 64 * 1024)),
            parallelism=int(params.get("parallelism", 8)),
            hash_len=32,
            type=Type.ID,
        )
@@ -192,16 +209,10 @@ def derive_key_from_parent_seed(parent_seed: str, fingerprint: str = None) -> by
def derive_index_key_seed_only(seed: str) -> bytes:
    """Derive a deterministic Fernet key from only the BIP-39 seed."""
    """Derive the index encryption key using the v1 hierarchy."""
    seed_bytes = Bip39SeedGenerator(seed).Generate()
    hkdf = HKDF(
        algorithm=hashes.SHA256(),
        length=32,
        salt=None,
        info=b"password-db",
        backend=default_backend(),
    )
    key = hkdf.derive(seed_bytes)
    master = kd(seed_bytes, b"seedpass:v1:master")
    key = kd(master, b"seedpass:v1:storage")
    return base64.urlsafe_b64encode(key)
@@ -210,25 +221,21 @@ def derive_index_key(seed: str) -> bytes:
    return derive_index_key_seed_only(seed)


def derive_totp_secret(seed: str, index: int) -> str:
    """Derive a base32-encoded TOTP secret from a BIP39 seed."""
def derive_totp_secret(seed: Union[str, bytes], index: int) -> str:
    """Derive a base32-encoded TOTP secret from a seed or raw key."""
    try:
        from local_bip85 import BIP85

        # Initialize BIP85 from the BIP39 seed bytes
        seed_bytes = Bip39SeedGenerator(seed).Generate()
        if isinstance(seed, (bytes, bytearray)):
            seed_bytes = bytes(seed)
        else:
            seed_bytes = Bip39SeedGenerator(seed).Generate()
        bip85 = BIP85(seed_bytes)

        # Build the BIP32 path m/83696968'/39'/TOTP'/{index}'
        totp_int = int.from_bytes(b"TOTP", "big")
        path = f"m/83696968'/{TOTP_PURPOSE}'/{totp_int}'/{index}'"

        # Derive entropy using the same scheme as BIP85
        child_key = bip85.bip32_ctx.DerivePath(path)
        key_bytes = child_key.PrivateKey().Raw().ToBytes()
        entropy = hmac.new(b"bip-entropy-from-k", key_bytes, hashlib.sha512).digest()

        # Hash the first 32 bytes of entropy and encode the first 20 bytes
        hashed = hashlib.sha256(entropy[:32]).digest()
        secret = base64.b32encode(hashed[:20]).decode("utf-8")
        logger.debug(f"Derived TOTP secret for index {index}.")
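For clarity, a small sketch of the derivation path construction used above (the values are computed here, not taken from the diff):

```python
# The "TOTP" tag becomes a hardened child index; with TOTP_PURPOSE = 39 the path for
# index 0 works out as follows.
totp_int = int.from_bytes(b"TOTP", "big")
print(totp_int)                            # 1414485072
print(f"m/83696968'/39'/{totp_int}'/0'")   # m/83696968'/39'/1414485072'/0'
```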
@@ -267,18 +274,16 @@ def calibrate_argon2_time_cost(
    """

    password = "benchmark"
    fingerprint = b"argon2-calibration"
    salt = base64.b64encode(b"argon2-calibration").decode()
    time_cost = 1
    elapsed_ms = 0.0
    while time_cost <= max_time_cost:
        start = time.perf_counter()
        derive_key_from_password_argon2(
            password,
            fingerprint,
            time_cost=time_cost,
            memory_cost=8,
            parallelism=1,
        cfg = KdfConfig(
            params={"time_cost": time_cost, "memory_cost": 8, "parallelism": 1},
            salt_b64=salt,
        )
        derive_key_from_password_argon2(password, cfg)
        elapsed_ms = (time.perf_counter() - start) * 1000
        if elapsed_ms >= target_ms:
            break
src/utils/key_hierarchy.py (new file, 28 lines)
@@ -0,0 +1,28 @@
"""Key hierarchy helper functions."""

from cryptography.hazmat.primitives.kdf.hkdf import HKDF
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.backends import default_backend


def kd(root: bytes, info: bytes, length: int = 32) -> bytes:
    """Derive a sub-key from ``root`` using HKDF-SHA256.

    Parameters
    ----------
    root:
        Root key material.
    info:
        Domain separation string.
    length:
        Length of the derived key in bytes. Defaults to 32.
    """

    hkdf = HKDF(
        algorithm=hashes.SHA256(),
        length=length,
        salt=None,
        info=info,
        backend=default_backend(),
    )
    return hkdf.derive(root)
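A minimal usage sketch (not part of the new file) chaining `kd` the way `derive_index_key_seed_only` does; the 64-byte root below is a placeholder for real BIP-39 seed bytes, and the import path assumes `src` is on the Python path.

```python
# Hypothetical chaining of kd() into a master key and a storage key.
from utils.key_hierarchy import kd  # assumed import path

root = b"\x00" * 64                           # placeholder for BIP-39 seed bytes
master = kd(root, b"seedpass:v1:master")      # domain-separated master key
storage = kd(master, b"seedpass:v1:storage")  # 32-byte storage key
assert len(storage) == 32
```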
@@ -3,9 +3,16 @@ from __future__ import annotations
import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

# TODO: Replace this Python implementation with a Rust/WASM module for
# critical cryptographic operations.


class InMemorySecret:
    """Store sensitive data encrypted in RAM using AES-GCM."""
    """Store sensitive data encrypted in RAM using AES-GCM.

    Zeroization is best-effort only; Python's memory management may retain
    copies of the plaintext.
    """

    def __init__(self, data: bytes) -> None:
        if not isinstance(data, (bytes, bytearray)):
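As background, here is a standalone round-trip of the AES-GCM primitive that `InMemorySecret` wraps; the class's internal attribute names are not shown in this hunk, so this is only an illustrative sketch.

```python
# Illustrative AES-GCM encrypt/decrypt cycle using the cryptography library.
import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=128)   # ephemeral in-memory key
nonce = os.urandom(12)                      # 96-bit nonce, unique per encryption
aes = AESGCM(key)
ciphertext = aes.encrypt(nonce, b"sensitive seed material", None)
assert aes.decrypt(nonce, ciphertext, None) == b"sensitive seed material"
```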
@@ -33,6 +33,12 @@ logger = logging.getLogger(__name__)
DEFAULT_MAX_ATTEMPTS = 5


def _env_password() -> str | None:
    """Return a password supplied via environment for non-interactive use."""

    return os.getenv("SEEDPASS_TEST_PASSWORD") or os.getenv("SEEDPASS_PASSWORD")


def _get_max_attempts(override: int | None = None) -> int:
    """Return the configured maximum number of prompt attempts."""
@@ -80,6 +86,13 @@ def prompt_new_password(max_retries: int | None = None) -> str:
    Raises:
        PasswordPromptError: If the user fails to provide a valid password after multiple attempts.
    """
    env_pw = _env_password()
    if env_pw:
        normalized = unicodedata.normalize("NFKD", env_pw)
        if len(normalized) < MIN_PASSWORD_LENGTH:
            raise PasswordPromptError("Environment password too short")
        return normalized

    max_retries = _get_max_attempts(max_retries)
    attempts = 0
@@ -164,6 +177,10 @@ def prompt_existing_password(
        PasswordPromptError: If the user interrupts the operation or exceeds
            ``max_retries`` attempts.
    """
    env_pw = _env_password()
    if env_pw:
        return unicodedata.normalize("NFKD", env_pw)

    max_retries = _get_max_attempts(max_retries)
    attempts = 0
    while max_retries == 0 or attempts < max_retries:
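A short non-interactive usage sketch (not part of the diff) showing how the environment variables checked by `_env_password` bypass the prompts above:

```python
# Hypothetical non-interactive run: the prompt helpers return the environment value
# (NFKD-normalized) instead of reading from the terminal.
import os

os.environ["SEEDPASS_PASSWORD"] = "a-sufficiently-long-passphrase"
# prompt_new_password() and prompt_existing_password() will now pick this up,
# which is useful for CI runs and automated tests.
```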
@@ -102,9 +102,11 @@ def _masked_input_posix(prompt: str) -> str:
def masked_input(prompt: str) -> str:
    """Return input from the user while masking typed characters."""
    if sys.platform == "win32":
        return _masked_input_windows(prompt)
    return _masked_input_posix(prompt)
    func = _masked_input_windows if sys.platform == "win32" else _masked_input_posix
    try:
        return func(prompt)
    except Exception:  # pragma: no cover - fallback when TTY operations fail
        return input(prompt)


def prompt_seed_words(count: int = 12, *, max_attempts: int | None = None) -> str:
@@ -9,10 +9,10 @@ class SlowBIP85:
    def __init__(self):
        self.calls = 0

    def derive_entropy(self, index: int, bytes_len: int, app_no: int = 39) -> bytes:
    def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 39) -> bytes:
        self.calls += 1
        time.sleep(0.01)
        return b"\x00" * bytes_len
        return b"\x00" * entropy_bytes


def _setup_manager(bip85: SlowBIP85) -> PasswordManager:
@@ -21,10 +21,12 @@ def _setup_manager(bip85: SlowBIP85) -> PasswordManager:
    pm.bip85 = bip85
    orig = bip85.derive_entropy

    def cached(index: int, bytes_len: int, app_no: int = 39) -> bytes:
    def cached(index: int, entropy_bytes: int, app_no: int = 39) -> bytes:
        key = (app_no, index)
        if key not in pm._bip85_cache:
            pm._bip85_cache[key] = orig(index=index, bytes_len=bytes_len, app_no=app_no)
            pm._bip85_cache[key] = orig(
                index=index, entropy_bytes=entropy_bytes, app_no=app_no
            )
        return pm._bip85_cache[key]

    bip85.derive_entropy = cached
@@ -44,7 +46,7 @@ def test_bip85_cache_benchmark():
    for _ in range(3):
        pm.get_bip85_entropy(32, 1)
    cached_time = time.perf_counter() - start

    assert cached_time < uncached_time
    # Ensure caching avoids redundant derive calls without relying on
    # potentially flaky timing comparisons across platforms.
    assert slow_uncached.calls == 3
    assert slow_cached.calls == 1