Mirror of https://github.com/PR0M3TH3AN/SeedPass.git (synced 2025-09-10 08:19:23 +00:00)

Compare commits: alpha...15df3f10a6 (42 commits)
Commits in this comparison (SHA1 only; author and date cells were not captured in this mirror view):

15df3f10a6, b451097c65, 9cacd1b13d, b97d60778b, bbb26ca55a, d6e03d5e7a, 26632c0e70, 06ca51993a, 1b6b0ab5c5, 87999b1888, 6928b4ddbf, 73183d53a5, c9ad16f150, bd86bdbb3a, 8d5374ef5b, 468608a369, 56e652089a, c353c04472, 2559920a14, 57935bdfc1, 55fdee522c, af4eb72385, 90c304ff6e, 7b1ef2abe2, 5194adf145, 8f74ac27f4, 1232630dba, 62983df69c, b4238791aa, d1fccbc4f2, 50532597b8, bb733bb194, 785acf938c, 4973095a5c, 69f1619816, e1b821bc55, a21efa91db, 5109f96ce7, 19577163cf, b0e4ab9bc6, 3ff3e4e1d6, 08c4453326
@@ -39,6 +39,11 @@ This project is written in **Python**. Follow these instructions when working wi
 Following these practices helps keep the code base consistent and secure.
 
+## Deterministic Artifact Generation
+
+- All generated artifacts (passwords, keys, TOTP secrets, etc.) must be fully deterministic across runs and platforms.
+- Randomness is only permitted for security primitives (e.g., encryption nonces, in-memory keys) and must never influence derived artifacts.
+
 ## Legacy Index Migration
 
 - Always provide a migration path for index archives and import/export routines.
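To make the deterministic-artifact rule concrete, here is a minimal illustrative sketch (not SeedPass code): derived artifacts come from a keyed, index-based derivation so every run yields the same output, while an encryption nonce may use the OS RNG because it never feeds back into derived material.

```python
# Illustrative sketch only - not the SeedPass implementation.
import hashlib
import hmac
import os

SEED = b"example seed bytes"  # assumption: a fixed per-profile seed

def derive_artifact(index: int, length: int = 32) -> bytes:
    """Deterministic: the same seed and index always yield the same bytes."""
    return hmac.new(SEED, f"artifact:{index}".encode(), hashlib.sha512).digest()[:length]

def fresh_nonce() -> bytes:
    """Randomness is acceptable here: a nonce protects ciphertext and never becomes a derived artifact."""
    return os.urandom(12)

assert derive_artifact(0) == derive_artifact(0)  # stable across runs and platforms
```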
README.md (22 lines changed)

@@ -146,6 +146,10 @@ The Windows installer will attempt to install Git automatically if it is not alr
 **Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.
 
+#### Installer Dependency Checks
+
+The installer verifies that core build tooling—C/C++ build tools, Rust, CMake, and the imaging/GTK libraries—are available before completing. Pass `--no-gui` to skip installing GUI packages. On Linux, ensure `xclip` or `wl-clipboard` is installed for clipboard support.
+
 #### Windows Nostr Sync Troubleshooting
 
 When backing up or restoring from Nostr on Windows, a few issues are common:
@@ -274,10 +278,10 @@ You can then launch SeedPass and create a backup:
 seedpass
 
 # Export your index
-seedpass export --file "~/seedpass_backup.json"
+seedpass vault export --file "~/seedpass_backup.json"
 
 # Later you can restore it
-seedpass import --file "~/seedpass_backup.json"
+seedpass vault import --file "~/seedpass_backup.json"
 
 # Quickly find or retrieve entries
 seedpass search "github"
@@ -431,6 +435,16 @@ For a full list of commands see [docs/advanced_cli.md](docs/advanced_cli.md). Th
 ```
 *(or `python src/main.py` when running directly from the repository)*
 
+To restore a previously backed up index at launch, provide the backup path
+and fingerprint:
+
+```bash
+seedpass --restore-backup /path/to/backup.json.enc --fingerprint <fp>
+```
+
+Without the flag, the startup prompt offers a **Restore from backup** option
+before the vault is initialized.
+
 2. **Follow the Prompts:**
 
    - **Seed Profile Selection:** If you have existing seed profiles, you'll be prompted to select one or add a new one.
@@ -616,6 +630,10 @@ initial setup. You must provide both your 12‑word master seed and the master
 password that encrypted the vault; without the correct password the retrieved
 data cannot be decrypted.
 
+Alternatively, a local backup file can be loaded at startup. Launch the
+application with `--restore-backup <file> --fingerprint <fp>` or choose the
+**Restore from backup** option presented before the vault initializes.
+
 1. Start SeedPass and choose option **4** when prompted to set up a seed.
 2. Paste your BIP‑85 seed phrase when asked.
 3. Enter the master password associated with that seed.
@@ -120,6 +120,11 @@ isn't on your PATH. If these tools are unavailable you'll see a link to download
 the installer now attempts to download Python 3.12 automatically so you don't have to compile packages from source.
 
 **Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.
 
+#### Installer Dependency Checks
+
+The installer verifies that core build tooling—C/C++ build tools, Rust, CMake, and the imaging/GTK libraries—are available before completing. Pass `--no-gui` to skip installing GUI packages. On Linux, ensure `xclip` or `wl-clipboard` is installed for clipboard support.
+
 ### Uninstall
 
 Run the matching uninstaller if you need to remove a previous installation or clean up an old `seedpass` command:
@@ -5,7 +5,9 @@
 # Supports installing from a specific branch using the -b or --branch flag.
 # Example: ./install.sh -b beta
 
-set -e
+set -euo pipefail
+IFS=$'\n\t'
+trap 'echo "[ERROR] Line $LINENO failed"; exit 1' ERR
 
 # --- Configuration ---
 REPO_URL="https://github.com/PR0M3TH3AN/SeedPass.git"

@@ -15,13 +17,13 @@ VENV_DIR="$INSTALL_DIR/venv"
 LAUNCHER_DIR="$HOME/.local/bin"
 LAUNCHER_PATH="$LAUNCHER_DIR/seedpass"
 BRANCH="main" # Default branch
-INSTALL_GUI=false
+INSTALL_GUI=true
 
 # --- Helper Functions ---
-print_info() { echo -e "\033[1;34m[INFO]\033[0m $1"; }
-print_success() { echo -e "\033[1;32m[SUCCESS]\033[0m $1"; }
-print_warning() { echo -e "\033[1;33m[WARNING]\033[0m $1"; }
-print_error() { echo -e "\033[1;31m[ERROR]\033[0m $1" >&2; exit 1; }
+print_info() { echo -e "\033[1;34m[INFO]\033[0m" "$1"; }
+print_success() { echo -e "\033[1;32m[SUCCESS]\033[0m" "$1"; }
+print_warning() { echo -e "\033[1;33m[WARNING]\033[0m" "$1"; }
+print_error() { echo -e "\033[1;31m[ERROR]\033[0m" "$1" >&2; exit 1; }
 
 # Install build dependencies for Gtk/GObject if available via the system package manager
 install_dependencies() {
@@ -30,29 +32,36 @@ install_dependencies() {
         sudo apt-get update && sudo apt-get install -y \
             build-essential pkg-config libcairo2 libcairo2-dev \
             libgirepository1.0-dev gobject-introspection \
-            gir1.2-gtk-3.0 python3-dev libffi-dev libssl-dev
+            gir1.2-gtk-3.0 libgtk-3-dev python3-dev libffi-dev libssl-dev \
+            cmake rustc cargo zlib1g-dev libjpeg-dev libpng-dev \
+            libfreetype6-dev xclip wl-clipboard
     elif command -v yum &>/dev/null; then
         sudo yum install -y @'Development Tools' cairo cairo-devel \
             gobject-introspection-devel gtk3-devel python3-devel \
-            libffi-devel openssl-devel
+            libffi-devel openssl-devel cmake rust cargo zlib-devel \
+            libjpeg-turbo-devel libpng-devel freetype-devel xclip \
+            wl-clipboard
     elif command -v dnf &>/dev/null; then
         sudo dnf groupinstall -y "Development Tools" && sudo dnf install -y \
            cairo cairo-devel gobject-introspection-devel gtk3-devel \
-            python3-devel libffi-devel openssl-devel
+            python3-devel libffi-devel openssl-devel cmake rust cargo \
+            zlib-devel libjpeg-turbo-devel libpng-devel freetype-devel \
+            xclip wl-clipboard
    elif command -v pacman &>/dev/null; then
-        sudo pacman -Syu --noconfirm base-devel pkgconf cairo \
-            gobject-introspection gtk3 python
+        sudo pacman -Syu --noconfirm base-devel pkgconf cmake rustup \
+            gtk3 gobject-introspection cairo libjpeg-turbo zlib \
+            libpng freetype xclip wl-clipboard && rustup default stable
     elif command -v brew &>/dev/null; then
-        brew install pkg-config cairo gobject-introspection gtk+3
+        brew install pkg-config cairo gobject-introspection gtk+3 cmake rustup-init && \
+            rustup-init -y
     else
         print_warning "Unsupported package manager. Please install Gtk/GObject dependencies manually."
     fi
-    print_warning "Install 'xclip' manually to enable clipboard features in secret mode."
 }
 usage() {
-    echo "Usage: $0 [-b | --branch <branch_name>] [--with-gui] [-h | --help]"
+    echo "Usage: $0 [-b | --branch <branch_name>] [--no-gui] [-h | --help]"
     echo "  -b, --branch   Specify the git branch to install (default: main)"
-    echo "  --with-gui     Include graphical interface dependencies"
+    echo "  --no-gui       Skip graphical interface dependencies (default: include GUI)"
     echo "  -h, --help     Display this help message"
     exit 0
 }
@@ -73,8 +82,8 @@ main() {
             -h|--help)
                 usage
                 ;;
-            --with-gui)
-                INSTALL_GUI=true
+            --no-gui)
+                INSTALL_GUI=false
                 shift
                 ;;
             *)

@@ -179,6 +188,11 @@ main() {
     else
         pip install -e .
     fi
 
+    if ! "$VENV_DIR/bin/python" -c "import seedpass.cli; print('ok')"; then
+        print_error "SeedPass CLI import check failed."
+    fi
+
     deactivate
 
     # 7. Create launcher script
@@ -6,10 +6,10 @@ logger = logging.getLogger(__name__)
 
 try:
     from .bip85 import BIP85
-    logger.info("BIP85 module imported successfully.")
-except Exception as e:
-    logger.error(f"Failed to import BIP85 module: {e}", exc_info=True)
-    BIP85 = None
+except Exception as exc:
+    logger.error("Failed to import BIP85 module: %s", exc, exc_info=True)
+    raise ImportError(
+        "BIP85 dependencies are missing. Install 'bip_utils', 'cryptography', and 'colorama'."
+    ) from exc
 
-__all__ = ["BIP85"] if BIP85 is not None else []
+__all__ = ["BIP85"]
@@ -18,6 +18,8 @@ import hashlib
 import hmac
 import logging
 import os
+from typing import Union
+
 from colorama import Fore
 
 from bip_utils import Bip32Slip10Secp256k1, Bip39MnemonicGenerator, Bip39Languages
@@ -37,13 +39,19 @@ class Bip85Error(Exception):
 
 
 class BIP85:
-    def __init__(self, seed_bytes: bytes | str):
-        """Initialize from BIP39 seed bytes or BIP32 xprv string."""
+    def __init__(self, seed_or_xprv: Union[bytes, str]):
+        """Initialize from seed bytes or an ``xprv`` string.
+
+        Parameters:
+            seed_or_xprv (Union[bytes, str]): Either raw BIP39 seed bytes
+                or a BIP32 extended private key (``xprv``) string.
+        """
+
         try:
-            if isinstance(seed_bytes, (bytes, bytearray)):
-                self.bip32_ctx = Bip32Slip10Secp256k1.FromSeed(seed_bytes)
+            if isinstance(seed_or_xprv, (bytes, bytearray)):
+                self.bip32_ctx = Bip32Slip10Secp256k1.FromSeed(seed_or_xprv)
             else:
-                self.bip32_ctx = Bip32Slip10Secp256k1.FromExtendedKey(seed_bytes)
+                self.bip32_ctx = Bip32Slip10Secp256k1.FromExtendedKey(seed_or_xprv)
             logging.debug("BIP32 context initialized successfully.")
         except Exception as e:
             logging.error(f"Error initializing BIP32 context: {e}", exc_info=True)
@@ -51,26 +59,34 @@ class BIP85:
             raise Bip85Error(f"Error initializing BIP32 context: {e}")
 
     def derive_entropy(
-        self, index: int, bytes_len: int, app_no: int = 39, words_len: int | None = None
+        self,
+        index: int,
+        entropy_bytes: int,
+        app_no: int = 39,
+        word_count: int | None = None,
     ) -> bytes:
-        """
-        Derives entropy using BIP-85 HMAC-SHA512 method.
+        """Derive entropy using the BIP-85 HMAC-SHA512 method.
 
         Parameters:
             index (int): Index for the child entropy.
-            bytes_len (int): Number of bytes to derive for the entropy.
-            app_no (int): Application number (default 39 for BIP39)
+            entropy_bytes (int): Number of bytes of entropy to derive.
+            app_no (int): Application number (default 39 for BIP39).
+            word_count (int | None): Number of words used in the derivation path
+                for BIP39. If ``None`` and ``app_no`` is ``39``, ``word_count``
+                defaults to ``entropy_bytes``. The final segment of the
+                derivation path becomes ``m/83696968'/39'/0'/word_count'/index'``.
 
         Returns:
-            bytes: Derived entropy.
+            bytes: Derived entropy of length ``entropy_bytes``.
 
         Raises:
-            SystemExit: If derivation fails or entropy length is invalid.
+            SystemExit: If derivation fails or the derived entropy length is
+                invalid.
         """
         if app_no == 39:
-            if words_len is None:
-                words_len = bytes_len
-            path = f"m/83696968'/{app_no}'/0'/{words_len}'/{index}'"
+            if word_count is None:
+                word_count = entropy_bytes
+            path = f"m/83696968'/{app_no}'/0'/{word_count}'/{index}'"
         elif app_no == 32:
             path = f"m/83696968'/{app_no}'/{index}'"
         else:
@@ -86,17 +102,17 @@ class BIP85:
             hmac_result = hmac.new(hmac_key, k, hashlib.sha512).digest()
             logging.debug(f"HMAC-SHA512 result: {hmac_result.hex()}")
 
-            entropy = hmac_result[:bytes_len]
+            entropy = hmac_result[:entropy_bytes]
 
-            if len(entropy) != bytes_len:
+            if len(entropy) != entropy_bytes:
                 logging.error(
-                    f"Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
+                    f"Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
                 )
                 print(
-                    f"{Fore.RED}Error: Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
+                    f"{Fore.RED}Error: Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
                 )
                 raise Bip85Error(
-                    f"Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
+                    f"Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
                 )
 
             logging.debug(f"Derived entropy: {entropy.hex()}")
@@ -107,14 +123,17 @@ class BIP85:
             raise Bip85Error(f"Error deriving entropy: {e}")
 
     def derive_mnemonic(self, index: int, words_num: int) -> str:
-        bytes_len = {12: 16, 18: 24, 24: 32}.get(words_num)
-        if not bytes_len:
+        entropy_bytes = {12: 16, 18: 24, 24: 32}.get(words_num)
+        if not entropy_bytes:
             logging.error(f"Unsupported number of words: {words_num}")
             print(f"{Fore.RED}Error: Unsupported number of words: {words_num}")
             raise Bip85Error(f"Unsupported number of words: {words_num}")
 
         entropy = self.derive_entropy(
-            index=index, bytes_len=bytes_len, app_no=39, words_len=words_num
+            index=index,
+            entropy_bytes=entropy_bytes,
+            app_no=39,
+            word_count=words_num,
         )
         try:
             mnemonic = Bip39MnemonicGenerator(Bip39Languages.ENGLISH).FromEntropy(

@@ -130,7 +149,7 @@ class BIP85:
     def derive_symmetric_key(self, index: int = 0, app_no: int = 2) -> bytes:
         """Derive 32 bytes of entropy for symmetric key usage."""
         try:
-            key = self.derive_entropy(index=index, bytes_len=32, app_no=app_no)
+            key = self.derive_entropy(index=index, entropy_bytes=32, app_no=app_no)
             logging.debug(f"Derived symmetric key: {key.hex()}")
             return key
         except Exception as e:
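For reference, a short usage sketch of the renamed keyword arguments against the class shown above. The seed phrase is the well-known BIP-39 test mnemonic, used purely for illustration:

```python
# Illustrative usage of the renamed parameters; BIP85 is the class from the hunks above.
from bip_utils import Bip39SeedGenerator

mnemonic = " ".join(["abandon"] * 11 + ["about"])   # standard test vector, not a real secret
seed_bytes = Bip39SeedGenerator(mnemonic).Generate()
bip85 = BIP85(seed_bytes)

# 32 bytes of child entropy on path m/83696968'/39'/0'/24'/0'
entropy = bip85.derive_entropy(index=0, entropy_bytes=32, app_no=39, word_count=24)

# Or derive a 12-word child mnemonic directly (entropy_bytes is resolved internally).
child = bip85.derive_mnemonic(index=0, words_num=12)
```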
src/main.py (46 lines changed)

@@ -19,7 +19,7 @@ from termcolor import colored
 from utils.color_scheme import color_text
 import importlib
 
-from seedpass.core.manager import PasswordManager
+from seedpass.core.manager import PasswordManager, restore_backup_index
 from nostr.client import NostrClient
 from seedpass.core.entry_types import EntryType
 from seedpass.core.config_manager import ConfigManager
@@ -1285,6 +1285,10 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
     load_global_config()
     parser = argparse.ArgumentParser()
     parser.add_argument("--fingerprint")
+    parser.add_argument(
+        "--restore-backup",
+        help="Restore index from backup file before starting",
+    )
     parser.add_argument(
         "--no-clipboard",
         action="store_true",
@@ -1315,6 +1319,41 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
 
     args = parser.parse_args(argv)
 
+    if args.restore_backup:
+        fp_target = args.fingerprint or fingerprint
+        if fp_target is None:
+            print(
+                colored(
+                    "Error: --fingerprint is required when using --restore-backup.",
+                    "red",
+                )
+            )
+            return 1
+        try:
+            restore_backup_index(Path(args.restore_backup), fp_target)
+            logger.info("Restored backup from %s", args.restore_backup)
+        except Exception as e:
+            logger.error(f"Failed to restore backup: {e}", exc_info=True)
+            print(colored(f"Error: Failed to restore backup: {e}", "red"))
+            return 1
+    elif args.command is None:
+        print("Startup Options:")
+        print("1. Continue")
+        print("2. Restore from backup")
+        choice = input("Select an option: ").strip()
+        if choice == "2":
+            path = input("Enter backup file path: ").strip()
+            fp_target = args.fingerprint or fingerprint
+            if fp_target is None:
+                fp_target = input("Enter fingerprint for restore: ").strip()
+            try:
+                restore_backup_index(Path(path), fp_target)
+                logger.info("Restored backup from %s", path)
+            except Exception as e:
+                logger.error(f"Failed to restore backup: {e}", exc_info=True)
+                print(colored(f"Error: Failed to restore backup: {e}", "red"))
+                return 1
+
     if args.max_prompt_attempts is not None:
         os.environ["SEEDPASS_MAX_PROMPT_ATTEMPTS"] = str(args.max_prompt_attempts)

@@ -1376,9 +1415,10 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
         if entry.get("type") != EntryType.TOTP.value:
             print(colored("Entry is not a TOTP entry.", "red"))
             return 1
-        code = password_manager.entry_manager.get_totp_code(
-            idx, password_manager.parent_seed
+        key = getattr(password_manager, "KEY_TOTP_DET", None) or getattr(
+            password_manager, "parent_seed", None
         )
+        code = password_manager.entry_manager.get_totp_code(idx, key)
         print(code)
         try:
             if copy_to_clipboard(code, password_manager.clipboard_clear_delay):
@@ -85,7 +85,7 @@ class KeyManager:
             # Derive entropy for Nostr key (32 bytes)
             entropy_bytes = self.bip85.derive_entropy(
                 index=index,
-                bytes_len=32,
+                entropy_bytes=32,
                 app_no=NOSTR_KEY_APP_ID,
             )
 
@@ -102,7 +102,7 @@ class KeyManager:
         """Derive Nostr keys using the legacy application ID."""
         try:
             entropy = self.bip85.derive_entropy(
-                index=0, bytes_len=32, app_no=LEGACY_NOSTR_KEY_APP_ID
+                index=0, entropy_bytes=32, app_no=LEGACY_NOSTR_KEY_APP_ID
             )
             return Keys(priv_k=entropy.hex())
         except Exception as e:
@@ -1,41 +0,0 @@
-# nostr/logging_config.py
-
-import logging
-import os
-
-# Comment out or remove the configure_logging function to avoid conflicts
-# def configure_logging():
-#     """
-#     Configures logging with both file and console handlers.
-#     Logs include the timestamp, log level, message, filename, and line number.
-#     Only ERROR and higher-level messages are shown in the terminal, while all messages
-#     are logged in the log file.
-#     """
-#     logger = logging.getLogger()
-#     logger.setLevel(logging.DEBUG)  # Set root logger to DEBUG
-#
-#     # Prevent adding multiple handlers if configure_logging is called multiple times
-#     if not logger.handlers:
-#         # Create the 'logs' folder if it doesn't exist
-#         log_directory = 'logs'
-#         if not os.path.exists(log_directory):
-#             os.makedirs(log_directory)
-#
-#         # Create handlers
-#         c_handler = logging.StreamHandler()
-#         f_handler = logging.FileHandler(os.path.join(log_directory, 'app.log'))
-#
-#         # Set levels: only errors and critical messages will be shown in the console
-#         c_handler.setLevel(logging.ERROR)
-#         f_handler.setLevel(logging.DEBUG)
-#
-#         # Create formatters and add them to handlers, include file and line number in log messages
-#         formatter = logging.Formatter(
-#             '%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]'
-#         )
-#         c_handler.setFormatter(formatter)
-#         f_handler.setFormatter(formatter)
-#
-#         # Add handlers to the logger
-#         logger.addHandler(c_handler)
-#         logger.addHandler(f_handler)
@@ -1,9 +0,0 @@
-"""Placeholder utilities for Nostr.
-
-This module is intentionally left minimal and will be expanded in future
-releases as the Nostr integration grows.
-"""
-
-# The module currently provides no functionality.
-# `pass` denotes the intentional absence of implementation.
-pass
@@ -28,7 +28,6 @@ Generated on: 2025-04-06
 ├── encryption_manager.py
 ├── event_handler.py
 ├── key_manager.py
-├── logging_config.py
 ├── utils.py
 ├── utils/
 ├── __init__.py
@@ -3082,52 +3081,6 @@ __all__ = ['NostrClient']
 
 ```
 
-## nostr/logging_config.py
-
-```python
-# nostr/logging_config.py
-
-import logging
-import os
-
-# Comment out or remove the configure_logging function to avoid conflicts
-# def configure_logging():
-#     … (remainder identical to the commented-out configure_logging block in the deleted nostr/logging_config.py shown above)
-
-```
-
 ## nostr/event_handler.py
 ```python
 # nostr/event_handler.py
@@ -464,7 +464,8 @@ def export_totp(
     _check_token(request, authorization)
     _require_password(request, password)
     pm = _get_pm(request)
-    return pm.entry_manager.export_totp_entries(pm.parent_seed)
+    key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
+    return pm.entry_manager.export_totp_entries(key)
 
 
 @app.get("/api/v1/totp")

@@ -478,11 +479,12 @@ def get_totp_codes(
     _require_password(request, password)
     pm = _get_pm(request)
     entries = pm.entry_manager.list_entries(
-        filter_kind=EntryType.TOTP.value, include_archived=False
+        filter_kinds=[EntryType.TOTP.value], include_archived=False
     )
     codes = []
     for idx, label, _u, _url, _arch in entries:
-        code = pm.entry_manager.get_totp_code(idx, pm.parent_seed)
+        key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
+        code = pm.entry_manager.get_totp_code(idx, key)
 
         rem = pm.entry_manager.get_totp_time_remaining(idx)
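The repeated `getattr` chain prefers a dedicated deterministic TOTP key when the password manager exposes one and otherwise falls back to the parent seed. A tiny stand-alone illustration of that fallback (the class below is a hypothetical stand-in, not the real `PasswordManager`):

```python
# Hypothetical stand-in object to demonstrate the fallback order only.
class FakePM:
    parent_seed = "twelve word seed phrase ..."  # always present
    # KEY_TOTP_DET is optional; older profiles may not define it

pm = FakePM()
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
assert key == pm.parent_seed  # falls back because KEY_TOTP_DET is absent
```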
@@ -6,8 +6,10 @@ from pathlib import Path
 from typing import List, Optional
 
 import typer
+import click
 
 from .common import _get_entry_service, EntryType
+from seedpass.core.entry_types import ALL_ENTRY_TYPES
 from utils.clipboard import ClipboardUnavailableError

@@ -20,13 +22,20 @@ def entry_list(
     sort: str = typer.Option(
         "index", "--sort", help="Sort by 'index', 'label', or 'updated'"
     ),
-    kind: Optional[str] = typer.Option(None, "--kind", help="Filter by entry type"),
+    kind: Optional[str] = typer.Option(
+        None,
+        "--kind",
+        help="Filter by entry type",
+        click_type=click.Choice(ALL_ENTRY_TYPES),
+    ),
     archived: bool = typer.Option(False, "--archived", help="Include archived"),
 ) -> None:
     """List entries in the vault."""
     service = _get_entry_service(ctx)
     entries = service.list_entries(
-        sort_by=sort, filter_kind=kind, include_archived=archived
+        sort_by=sort,
+        filter_kinds=[kind] if kind else None,
+        include_archived=archived,
     )
     for idx, label, username, url, is_archived in entries:
         line = f"{idx}: {label}"

@@ -43,16 +52,17 @@ def entry_list(
 def entry_search(
     ctx: typer.Context,
     query: str,
-    kind: List[str] = typer.Option(
+    kinds: List[str] = typer.Option(
         None,
         "--kind",
         "-k",
         help="Filter by entry kinds (can be repeated)",
+        click_type=click.Choice(ALL_ENTRY_TYPES),
     ),
 ) -> None:
     """Search entries."""
     service = _get_entry_service(ctx)
-    kinds = list(kind) if kind else None
+    kinds = list(kinds) if kinds else None
     results = service.search_entries(query, kinds=kinds)
     if not results:
         typer.echo("No matching entries found")
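The `click_type=click.Choice(...)` addition makes Typer reject unknown `--kind` values at parse time instead of deep inside the entry service. A self-contained sketch of the same pattern (the command name and the allowed values below are illustrative, not the actual SeedPass CLI):

```python
# Minimal Typer app showing an option restricted by click.Choice.
from typing import Optional

import click
import typer

ALLOWED_KINDS = ["password", "totp", "ssh", "seed"]  # illustrative list only

app = typer.Typer()

@app.command()
def list_entries(
    kind: Optional[str] = typer.Option(
        None,
        "--kind",
        help="Filter by entry type",
        click_type=click.Choice(ALLOWED_KINDS),
    ),
) -> None:
    # An invalid value such as `--kind bogus` is rejected by Click before this body runs.
    typer.echo(f"kind={kind}")

if __name__ == "__main__":
    app()
```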
@@ -265,13 +265,13 @@ class EntryService:
     def list_entries(
         self,
         sort_by: str = "index",
-        filter_kind: str | None = None,
+        filter_kinds: list[str] | None = None,
         include_archived: bool = False,
     ):
         with self._lock:
             return self._manager.entry_manager.list_entries(
                 sort_by=sort_by,
-                filter_kind=filter_kind,
+                filter_kinds=filter_kinds,
                 include_archived=include_archived,
             )

@@ -305,9 +305,10 @@ class EntryService:
 
     def get_totp_code(self, entry_id: int) -> str:
         with self._lock:
-            return self._manager.entry_manager.get_totp_code(
-                entry_id, self._manager.parent_seed
+            key = getattr(self._manager, "KEY_TOTP_DET", None) or getattr(
+                self._manager, "parent_seed", None
             )
+            return self._manager.entry_manager.get_totp_code(entry_id, key)
 
     def add_entry(
         self,

@@ -515,9 +516,10 @@ class EntryService:
 
     def export_totp_entries(self) -> dict:
         with self._lock:
-            return self._manager.entry_manager.export_totp_entries(
-                self._manager.parent_seed
+            key = getattr(self._manager, "KEY_TOTP_DET", None) or getattr(
+                self._manager, "parent_seed", None
             )
+            return self._manager.entry_manager.export_totp_entries(key)
 
     def display_totp_codes(self) -> None:
         with self._lock:
@@ -16,8 +16,9 @@ except Exception:  # pragma: no cover - fallback for environments without orjson
 import hashlib
 import os
 import base64
+from dataclasses import asdict
 from pathlib import Path
-from typing import Optional
+from typing import Optional, Tuple
 
 from cryptography.hazmat.primitives.ciphers.aead import AESGCM
 from cryptography.exceptions import InvalidTag

@@ -26,6 +27,7 @@ from termcolor import colored
 from utils.file_lock import exclusive_lock
 from mnemonic import Mnemonic
 from utils.password_prompt import prompt_existing_password
+from utils.key_derivation import KdfConfig, CURRENT_KDF_VERSION
 
 # Instantiate the logger
 logger = logging.getLogger(__name__)
@@ -231,40 +233,76 @@
             raise ValueError("Invalid path outside fingerprint directory")
         return candidate
 
-    def encrypt_parent_seed(self, parent_seed: str) -> None:
+    def encrypt_parent_seed(
+        self, parent_seed: str, kdf: Optional[KdfConfig] = None
+    ) -> None:
         """Encrypts and saves the parent seed to 'parent_seed.enc'."""
         data = parent_seed.encode("utf-8")
-        encrypted_data = self.encrypt_data(data)  # This now creates V2 format
-        with exclusive_lock(self.parent_seed_file) as fh:
-            fh.seek(0)
-            fh.truncate()
-            fh.write(encrypted_data)
-        os.chmod(self.parent_seed_file, 0o600)
+        self.encrypt_and_save_file(data, self.parent_seed_file, kdf=kdf)
         logger.info(f"Parent seed encrypted and saved to '{self.parent_seed_file}'.")
 
     def decrypt_parent_seed(self) -> str:
         """Decrypts and returns the parent seed, handling migration."""
         with exclusive_lock(self.parent_seed_file) as fh:
             fh.seek(0)
-            encrypted_data = fh.read()
+            blob = fh.read()
 
+        kdf, encrypted_data = self._deserialize(blob)
         is_legacy = not encrypted_data.startswith(b"V2:")
         decrypted_data = self.decrypt_data(encrypted_data, context="seed")
 
         if is_legacy:
             logger.info("Parent seed was in legacy format. Re-encrypting to V2 format.")
-            self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip())
+            self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip(), kdf=kdf)
 
         return decrypted_data.decode("utf-8").strip()
 
-    def encrypt_and_save_file(self, data: bytes, relative_path: Path) -> None:
+    def _serialize(self, kdf: KdfConfig, ciphertext: bytes) -> bytes:
+        payload = {"kdf": asdict(kdf), "ct": base64.b64encode(ciphertext).decode()}
+        if USE_ORJSON:
+            return json_lib.dumps(payload)
+        return json_lib.dumps(payload, separators=(",", ":")).encode("utf-8")
+
+    def _deserialize(self, blob: bytes) -> Tuple[KdfConfig, bytes]:
+        """Return ``(KdfConfig, ciphertext)`` from serialized *blob*.
+
+        Legacy files stored the raw ciphertext without a JSON wrapper. If
+        decoding the wrapper fails, treat ``blob`` as the ciphertext and return
+        a default HKDF configuration.
+        """
+
+        try:
+            if USE_ORJSON:
+                obj = json_lib.loads(blob)
+            else:
+                obj = json_lib.loads(blob.decode("utf-8"))
+            kdf = KdfConfig(**obj.get("kdf", {}))
+            ct_b64 = obj.get("ct", "")
+            ciphertext = base64.b64decode(ct_b64)
+            if ciphertext:
+                return kdf, ciphertext
+        except Exception:  # pragma: no cover - fall back to legacy path
+            pass
+
+        # Legacy format: ``blob`` already contains the ciphertext
+        return (
+            KdfConfig(name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""),
+            blob,
+        )
+
+    def encrypt_and_save_file(
+        self, data: bytes, relative_path: Path, *, kdf: Optional[KdfConfig] = None
+    ) -> None:
+        if kdf is None:
+            kdf = KdfConfig()
         file_path = self.resolve_relative_path(relative_path)
         file_path.parent.mkdir(parents=True, exist_ok=True)
         encrypted_data = self.encrypt_data(data)
+        payload = self._serialize(kdf, encrypted_data)
         with exclusive_lock(file_path) as fh:
             fh.seek(0)
             fh.truncate()
-            fh.write(encrypted_data)
+            fh.write(payload)
             fh.flush()
             os.fsync(fh.fileno())
         os.chmod(file_path, 0o600)
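With `_serialize`/`_deserialize` in place, every encrypted file becomes a small JSON wrapper that records the KDF configuration next to the base64-encoded ciphertext. A rough sketch of the on-disk shape (field values are illustrative; the real defaults come from `utils.key_derivation.KdfConfig`):

```python
# Illustrative only: shows the wrapper layout, using the stdlib json module.
import base64
import json

ciphertext = b"V2:...bytes from encrypt_data()..."                  # placeholder bytes
kdf = {"name": "hkdf", "version": 1, "params": {}, "salt_b64": ""}  # assumed field values

blob = json.dumps(
    {"kdf": kdf, "ct": base64.b64encode(ciphertext).decode()},
    separators=(",", ":"),
).encode("utf-8")

# On read, _deserialize() tries this wrapper first; if JSON parsing fails,
# the blob is treated as legacy raw ciphertext with a default HKDF config.
```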
@@ -273,20 +311,37 @@
         file_path = self.resolve_relative_path(relative_path)
         with exclusive_lock(file_path) as fh:
             fh.seek(0)
-            encrypted_data = fh.read()
+            blob = fh.read()
+        _, encrypted_data = self._deserialize(blob)
         return self.decrypt_data(encrypted_data, context=str(relative_path))
 
-    def save_json_data(self, data: dict, relative_path: Optional[Path] = None) -> None:
+    def get_file_kdf(self, relative_path: Path) -> KdfConfig:
+        file_path = self.resolve_relative_path(relative_path)
+        with exclusive_lock(file_path) as fh:
+            fh.seek(0)
+            blob = fh.read()
+        kdf, _ = self._deserialize(blob)
+        return kdf
+
+    def save_json_data(
+        self,
+        data: dict,
+        relative_path: Optional[Path] = None,
+        *,
+        kdf: Optional[KdfConfig] = None,
+    ) -> None:
         if relative_path is None:
             relative_path = Path("seedpass_entries_db.json.enc")
         if USE_ORJSON:
             json_data = json_lib.dumps(data)
         else:
             json_data = json_lib.dumps(data, separators=(",", ":")).encode("utf-8")
-        self.encrypt_and_save_file(json_data, relative_path)
+        self.encrypt_and_save_file(json_data, relative_path, kdf=kdf)
         logger.debug(f"JSON data encrypted and saved to '{relative_path}'.")
 
-    def load_json_data(self, relative_path: Optional[Path] = None) -> dict:
+    def load_json_data(
+        self, relative_path: Optional[Path] = None, *, return_kdf: bool = False
+    ) -> dict | Tuple[dict, KdfConfig]:
         """
         Loads and decrypts JSON data, automatically migrating and re-saving
         if it's in the legacy format.

@@ -295,12 +350,18 @@ class EncryptionManager:
             relative_path = Path("seedpass_entries_db.json.enc")
         file_path = self.resolve_relative_path(relative_path)
         if not file_path.exists():
-            return {"entries": {}}
+            empty: dict = {"entries": {}}
+            if return_kdf:
+                return empty, KdfConfig(
+                    name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""
+                )
+            return empty
 
         with exclusive_lock(file_path) as fh:
             fh.seek(0)
-            encrypted_data = fh.read()
+            blob = fh.read()
 
+        kdf, encrypted_data = self._deserialize(blob)
         is_legacy = not encrypted_data.startswith(b"V2:")
         self.last_migration_performed = False

@@ -316,10 +377,12 @@ class EncryptionManager:
             # If it was a legacy file, re-save it in the new format now
             if is_legacy and self._legacy_migrate_flag:
                 logger.info(f"Migrating and re-saving legacy vault file: {file_path}")
-                self.save_json_data(data, relative_path)
+                self.save_json_data(data, relative_path, kdf=kdf)
                 self.update_checksum(relative_path)
                 self.last_migration_performed = True
 
+            if return_kdf:
+                return data, kdf
             return data
         except (InvalidToken, InvalidTag, JSONDecodeError) as e:
             logger.error(
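A short usage sketch of the new `return_kdf` flag, assuming `enc_mgr` is an initialized `EncryptionManager`:

```python
# Illustrative calls against the updated load_json_data() signature.
data, kdf = enc_mgr.load_json_data(return_kdf=True)  # index dict plus its KdfConfig
print(kdf.name)                                      # e.g. "hkdf" when no newer KDF is recorded
data_only = enc_mgr.load_json_data()                 # default behaviour is unchanged
```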
@@ -360,7 +423,8 @@
         if relative_path is None:
             relative_path = Path("seedpass_entries_db.json.enc")
 
-        is_legacy = not encrypted_data.startswith(b"V2:")
+        kdf, ciphertext = self._deserialize(encrypted_data)
+        is_legacy = not ciphertext.startswith(b"V2:")
         self.last_migration_performed = False
 
         def _process(decrypted: bytes) -> dict:

@@ -386,11 +450,9 @@ class EncryptionManager:
             return data
 
         try:
-            decrypted_data = self.decrypt_data(
-                encrypted_data, context=str(relative_path)
-            )
+            decrypted_data = self.decrypt_data(ciphertext, context=str(relative_path))
             data = _process(decrypted_data)
-            self.save_json_data(data, relative_path)  # This always saves in V2 format
+            self.save_json_data(data, relative_path, kdf=kdf)
             self.update_checksum(relative_path)
             logger.info("Index file from Nostr was processed and saved successfully.")
             self.last_migration_performed = is_legacy

@@ -401,10 +463,10 @@ class EncryptionManager:
                 "Enter your master password for legacy decryption: "
             )
             decrypted_data = self.decrypt_legacy(
-                encrypted_data, password, context=str(relative_path)
+                ciphertext, password, context=str(relative_path)
             )
             data = _process(decrypted_data)
-            self.save_json_data(data, relative_path)
+            self.save_json_data(data, relative_path, kdf=kdf)
             self.update_checksum(relative_path)
             logger.warning(
                 "Index decrypted using legacy password-only key derivation."
@@ -33,7 +33,7 @@ from pathlib import Path
 
 from termcolor import colored
 from .migrations import LATEST_VERSION
-from .entry_types import EntryType
+from .entry_types import EntryType, ALL_ENTRY_TYPES
 from .totp import TotpManager
 from utils.fingerprint import generate_fingerprint
 from utils.checksum import canonical_json_dumps

@@ -257,7 +257,7 @@ class EntryManager:
     def add_totp(
         self,
         label: str,
-        parent_seed: str,
+        parent_seed: str | bytes,
         *,
         archived: bool = False,
         secret: str | None = None,

@@ -461,7 +461,7 @@ class EntryManager:
 
         seed_bytes = Bip39SeedGenerator(parent_seed).Generate()
         bip85 = BIP85(seed_bytes)
-        entropy = bip85.derive_entropy(index=index, bytes_len=32)
+        entropy = bip85.derive_entropy(index=index, entropy_bytes=32)
         keys = Keys(priv_k=entropy.hex())
         npub = Keys.hex_to_bech32(keys.public_key_hex(), "npub")
         nsec = Keys.hex_to_bech32(keys.private_key_hex(), "nsec")

@@ -539,7 +539,7 @@ class EntryManager:
         bip85 = BIP85(seed_bytes)
 
         key_idx = int(entry.get("index", index))
-        entropy = bip85.derive_entropy(index=key_idx, bytes_len=32)
+        entropy = bip85.derive_entropy(index=key_idx, entropy_bytes=32)
         keys = Keys(priv_k=entropy.hex())
         npub = Keys.hex_to_bech32(keys.public_key_hex(), "npub")
         nsec = Keys.hex_to_bech32(keys.private_key_hex(), "nsec")
@@ -689,7 +689,10 @@ class EntryManager:
         return derive_seed_phrase(bip85, seed_index, words)
 
     def get_totp_code(
-        self, index: int, parent_seed: str | None = None, timestamp: int | None = None
+        self,
+        index: int,
+        parent_seed: str | bytes | None = None,
+        timestamp: int | None = None,
     ) -> str:
         """Return the current TOTP code for the specified entry."""
         entry = self.retrieve_entry(index)

@@ -719,7 +722,9 @@ class EntryManager:
         period = int(entry.get("period", 30))
         return TotpManager.time_remaining(period)
 
-    def export_totp_entries(self, parent_seed: str) -> dict[str, list[dict[str, Any]]]:
+    def export_totp_entries(
+        self, parent_seed: str | bytes
+    ) -> dict[str, list[dict[str, Any]]]:
         """Return all TOTP secrets and metadata for external use."""
         data = self._load_index()
         entries = data.get("entries", {})
@@ -1076,7 +1081,7 @@ class EntryManager:
     def list_entries(
         self,
         sort_by: str = "index",
-        filter_kind: str | None = None,
+        filter_kinds: list[str] | None = None,
         *,
         include_archived: bool = False,
         verbose: bool = True,

@@ -1088,8 +1093,9 @@ class EntryManager:
         sort_by:
             Field to sort by. Supported values are ``"index"``, ``"label"`` and
             ``"updated"``.
-        filter_kind:
-            Optional entry kind to restrict the results.
+        filter_kinds:
+            Optional list of entry kinds to restrict the results. Defaults to
+            ``ALL_ENTRY_TYPES``.
 
         Archived entries are omitted unless ``include_archived`` is ``True``.
         """

@@ -1118,12 +1124,14 @@ class EntryManager:
 
         sorted_items = sorted(entries_data.items(), key=sort_key)
 
+        if filter_kinds is None:
+            filter_kinds = ALL_ENTRY_TYPES
+
         filtered_items: List[Tuple[int, Dict[str, Any]]] = []
         for idx_str, entry in sorted_items:
             if (
-                filter_kind is not None
-                and entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
-                != filter_kind
+                entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
+                not in filter_kinds
             ):
                 continue
             if not include_archived and entry.get(
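Illustrative calls against the updated `list_entries` signature; the kind strings below are examples, and the canonical values live in `seedpass.core.entry_types.ALL_ENTRY_TYPES`:

```python
# Assuming `em` is an initialized EntryManager.
everything = em.list_entries()                       # filter_kinds=None -> all entry types
totp_only = em.list_entries(filter_kinds=["totp"])
mixed = em.list_entries(filter_kinds=["totp", "password"], include_archived=True)
```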
@@ -1371,7 +1379,7 @@ class EntryManager:
|
|||||||
def list_all_entries(
|
def list_all_entries(
|
||||||
self,
|
self,
|
||||||
sort_by: str = "index",
|
sort_by: str = "index",
|
||||||
filter_kind: str | None = None,
|
filter_kinds: list[str] | None = None,
|
||||||
*,
|
*,
|
||||||
include_archived: bool = False,
|
include_archived: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
@@ -1379,7 +1387,7 @@ class EntryManager:
|
|||||||
try:
|
try:
|
||||||
entries = self.list_entries(
|
entries = self.list_entries(
|
||||||
sort_by=sort_by,
|
sort_by=sort_by,
|
||||||
filter_kind=filter_kind,
|
filter_kinds=filter_kinds,
|
||||||
include_archived=include_archived,
|
include_archived=include_archived,
|
||||||
)
|
)
|
||||||
if not entries:
|
if not entries:
|
||||||
@@ -1403,7 +1411,7 @@ class EntryManager:
|
|||||||
|
|
||||||
def get_entry_summaries(
|
def get_entry_summaries(
|
||||||
self,
|
self,
|
||||||
filter_kind: str | None = None,
|
filter_kinds: list[str] | None = None,
|
||||||
*,
|
*,
|
||||||
include_archived: bool = False,
|
include_archived: bool = False,
|
||||||
) -> list[tuple[int, str, str]]:
|
) -> list[tuple[int, str, str]]:
|
||||||
@@ -1412,10 +1420,13 @@ class EntryManager:
|
|||||||
data = self._load_index()
|
data = self._load_index()
|
||||||
entries_data = data.get("entries", {})
|
entries_data = data.get("entries", {})
|
||||||
|
|
||||||
|
if filter_kinds is None:
|
||||||
|
filter_kinds = ALL_ENTRY_TYPES
|
||||||
|
|
||||||
summaries: list[tuple[int, str, str]] = []
|
summaries: list[tuple[int, str, str]] = []
|
||||||
for idx_str, entry in entries_data.items():
|
for idx_str, entry in entries_data.items():
|
||||||
etype = entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
|
etype = entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
|
||||||
if filter_kind and etype != filter_kind:
|
if etype not in filter_kinds:
|
||||||
continue
|
continue
|
||||||
if not include_archived and entry.get(
|
if not include_archived and entry.get(
|
||||||
"archived", entry.get("blacklisted", False)
|
"archived", entry.get("blacklisted", False)
|
||||||
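As a quick aside, here is a minimal sketch of how a caller might use the new `filter_kinds` parameter, assuming an already-initialized `EntryManager` instance named `entry_manager` (everything beyond `EntryType` and the signatures above is illustrative):

```python
from seedpass.core.entry_types import EntryType

# Only TOTP entries, sorted by label.
totp_rows = entry_manager.list_entries(
    sort_by="label",
    filter_kinds=[EntryType.TOTP.value],
)

# None (the default) now means "all entry types" rather than "no filter".
all_summaries = entry_manager.get_entry_summaries(
    filter_kinds=None,
    include_archived=False,
)
```

Accepting a list keeps single-kind callers working (wrap the value in a list, as the GUI change later in this diff does) while letting the menu and CLI request several kinds at once.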
src/seedpass/core/entry_service.py (new file, 233 lines)
@@ -0,0 +1,233 @@
from __future__ import annotations

import logging
import time
from typing import TYPE_CHECKING

from termcolor import colored

from constants import (
    DEFAULT_PASSWORD_LENGTH,
    MAX_PASSWORD_LENGTH,
    MIN_PASSWORD_LENGTH,
)
import seedpass.core.manager as manager_module
from utils.terminal_utils import clear_header_with_notification, pause

if TYPE_CHECKING:  # pragma: no cover - typing only
    from .manager import PasswordManager


class EntryService:
    """Entry management operations for :class:`PasswordManager`."""

    def __init__(self, manager: PasswordManager) -> None:
        self.manager = manager

    def handle_add_password(self) -> None:
        pm = self.manager
        try:
            fp, parent_fp, child_fp = pm.header_fingerprint_args
            clear_header_with_notification(
                pm,
                fp,
                "Main Menu > Add Entry > Password",
                parent_fingerprint=parent_fp,
                child_fingerprint=child_fp,
            )

            def prompt_length() -> int | None:
                length_input = input(
                    f"Enter desired password length (default {DEFAULT_PASSWORD_LENGTH}): "
                ).strip()
                length = DEFAULT_PASSWORD_LENGTH
                if length_input:
                    if not length_input.isdigit():
                        print(
                            colored("Error: Password length must be a number.", "red")
                        )
                        return None
                    length = int(length_input)
                    if not (MIN_PASSWORD_LENGTH <= length <= MAX_PASSWORD_LENGTH):
                        print(
                            colored(
                                f"Error: Password length must be between {MIN_PASSWORD_LENGTH} and {MAX_PASSWORD_LENGTH}.",
                                "red",
                            )
                        )
                        return None
                return length

            def finalize_entry(index: int, label: str, length: int) -> None:
                pm.is_dirty = True
                pm.last_update = time.time()

                entry = pm.entry_manager.retrieve_entry(index)
                password = pm._generate_password_for_entry(entry, index, length)

                print(
                    colored(
                        f"\n[+] Password generated and indexed with ID {index}.\n",
                        "green",
                    )
                )
                if pm.secret_mode_enabled:
                    if manager_module.copy_to_clipboard(
                        password, pm.clipboard_clear_delay
                    ):
                        print(
                            colored(
                                f"[+] Password copied to clipboard. Will clear in {pm.clipboard_clear_delay} seconds.",
                                "green",
                            )
                        )
                else:
                    print(colored(f"Password for {label}: {password}\n", "yellow"))

                try:
                    pm.start_background_vault_sync()
                    logging.info(
                        "Encrypted index posted to Nostr after entry addition."
                    )
                except Exception as nostr_error:  # pragma: no cover - best effort
                    logging.error(
                        f"Failed to post updated index to Nostr: {nostr_error}",
                        exc_info=True,
                    )
                pause()

            mode = input("Choose mode: [Q]uick or [A]dvanced? ").strip().lower()

            website_name = input("Enter the label or website name: ").strip()
            if not website_name:
                print(colored("Error: Label cannot be empty.", "red"))
                return

            username = input("Enter the username (optional): ").strip()
            url = input("Enter the URL (optional): ").strip()

            if mode.startswith("q"):
                length = prompt_length()
                if length is None:
                    return
                include_special_input = (
                    input("Include special characters? (Y/n): ").strip().lower()
                )
                include_special_chars: bool | None = None
                if include_special_input:
                    include_special_chars = include_special_input != "n"

                index = pm.entry_manager.add_entry(
                    website_name,
                    length,
                    username,
                    url,
                    include_special_chars=include_special_chars,
                )

                finalize_entry(index, website_name, length)
                return

            notes = input("Enter notes (optional): ").strip()
            tags_input = input("Enter tags (comma-separated, optional): ").strip()
            tags = (
                [t.strip() for t in tags_input.split(",") if t.strip()]
                if tags_input
                else []
            )

            custom_fields: list[dict[str, object]] = []
            while True:
                add_field = input("Add custom field? (y/N): ").strip().lower()
                if add_field != "y":
                    break
                label = input(" Field label: ").strip()
                value = input(" Field value: ").strip()
                hidden = input(" Hidden field? (y/N): ").strip().lower() == "y"
                custom_fields.append(
                    {"label": label, "value": value, "is_hidden": hidden}
                )

            length = prompt_length()
            if length is None:
                return

            include_special_input = (
                input("Include special characters? (Y/n): ").strip().lower()
            )
            include_special_chars: bool | None = None
            if include_special_input:
                include_special_chars = include_special_input != "n"

            allowed_special_chars = input(
                "Allowed special characters (leave blank for default): "
            ).strip()
            if not allowed_special_chars:
                allowed_special_chars = None

            special_mode = input("Special character mode (safe/leave blank): ").strip()
            if not special_mode:
                special_mode = None

            exclude_ambiguous_input = (
                input("Exclude ambiguous characters? (y/N): ").strip().lower()
            )
            exclude_ambiguous: bool | None = None
            if exclude_ambiguous_input:
                exclude_ambiguous = exclude_ambiguous_input == "y"

            min_uppercase_input = input(
                "Minimum uppercase letters (blank for default): "
            ).strip()
            if min_uppercase_input and not min_uppercase_input.isdigit():
                print(colored("Error: Minimum uppercase must be a number.", "red"))
                return
            min_uppercase = int(min_uppercase_input) if min_uppercase_input else None

            min_lowercase_input = input(
                "Minimum lowercase letters (blank for default): "
            ).strip()
            if min_lowercase_input and not min_lowercase_input.isdigit():
                print(colored("Error: Minimum lowercase must be a number.", "red"))
                return
            min_lowercase = int(min_lowercase_input) if min_lowercase_input else None

            min_digits_input = input("Minimum digits (blank for default): ").strip()
            if min_digits_input and not min_digits_input.isdigit():
                print(colored("Error: Minimum digits must be a number.", "red"))
                return
            min_digits = int(min_digits_input) if min_digits_input else None

            min_special_input = input(
                "Minimum special characters (blank for default): "
            ).strip()
            if min_special_input and not min_special_input.isdigit():
                print(colored("Error: Minimum special must be a number.", "red"))
                return
            min_special = int(min_special_input) if min_special_input else None

            index = pm.entry_manager.add_entry(
                website_name,
                length,
                username,
                url,
                archived=False,
                notes=notes,
                custom_fields=custom_fields,
                tags=tags,
                include_special_chars=include_special_chars,
                allowed_special_chars=allowed_special_chars,
                special_mode=special_mode,
                exclude_ambiguous=exclude_ambiguous,
                min_uppercase=min_uppercase,
                min_lowercase=min_lowercase,
                min_digits=min_digits,
                min_special=min_special,
            )

            finalize_entry(index, website_name, length)

        except Exception as e:  # pragma: no cover - defensive
            logging.error(f"Error during password generation: {e}", exc_info=True)
            print(colored(f"Error: Failed to generate password: {e}", "red"))
            pause()
@@ -15,3 +15,7 @@ class EntryType(str, Enum):
     NOSTR = "nostr"
     KEY_VALUE = "key_value"
     MANAGED_ACCOUNT = "managed_account"
+
+
+# List of all entry type values for convenience
+ALL_ENTRY_TYPES = [e.value for e in EntryType]

File diff suppressed because it is too large
src/seedpass/core/menu_handler.py (new file, 185 lines)
@@ -0,0 +1,185 @@
from __future__ import annotations

import logging
import sys
from typing import TYPE_CHECKING

from termcolor import colored

from .entry_types import EntryType, ALL_ENTRY_TYPES
import seedpass.core.manager as manager_module
from utils.color_scheme import color_text
from utils.terminal_utils import clear_header_with_notification

if TYPE_CHECKING:  # pragma: no cover - typing only
    from .manager import PasswordManager


class MenuHandler:
    """Handle interactive menu operations for :class:`PasswordManager`."""

    def __init__(self, manager: PasswordManager) -> None:
        self.manager = manager

    def handle_list_entries(self) -> None:
        """List entries and optionally show details."""
        pm = self.manager
        try:
            while True:
                fp, parent_fp, child_fp = pm.header_fingerprint_args
                clear_header_with_notification(
                    pm,
                    fp,
                    "Main Menu > List Entries",
                    parent_fingerprint=parent_fp,
                    child_fingerprint=child_fp,
                )
                print(color_text("\nList Entries:", "menu"))
                print(color_text("1. All", "menu"))
                option_map: dict[str, str] = {}
                for i, etype in enumerate(ALL_ENTRY_TYPES, start=2):
                    label = etype.replace("_", " ").title()
                    print(color_text(f"{i}. {label}", "menu"))
                    option_map[str(i)] = etype
                choice = input("Select entry type or press Enter to go back: ").strip()
                if choice == "1":
                    filter_kinds = None
                elif choice in option_map:
                    filter_kinds = [option_map[choice]]
                elif not choice:
                    return
                else:
                    print(colored("Invalid choice.", "red"))
                    continue

                while True:
                    summaries = pm.entry_manager.get_entry_summaries(
                        filter_kinds, include_archived=False
                    )
                    if not summaries:
                        break
                    fp, parent_fp, child_fp = pm.header_fingerprint_args
                    clear_header_with_notification(
                        pm,
                        fp,
                        "Main Menu > List Entries",
                        parent_fingerprint=parent_fp,
                        child_fingerprint=child_fp,
                    )
                    print(colored("\n[+] Entries:\n", "green"))
                    for idx, etype, label in summaries:
                        if filter_kinds is None:
                            display_type = etype.capitalize()
                            print(colored(f"{idx}. {display_type} - {label}", "cyan"))
                        else:
                            print(colored(f"{idx}. {label}", "cyan"))
                    idx_input = input(
                        "Enter index to view details or press Enter to go back: "
                    ).strip()
                    if not idx_input:
                        break
                    if not idx_input.isdigit():
                        print(colored("Invalid index.", "red"))
                        continue
                    pm.show_entry_details_by_index(int(idx_input))
        except Exception as e:  # pragma: no cover - defensive
            logging.error(f"Failed to list entries: {e}", exc_info=True)
            print(colored(f"Error: Failed to list entries: {e}", "red"))

    def handle_display_totp_codes(self) -> None:
        """Display all stored TOTP codes with a countdown progress bar."""
        pm = self.manager
        try:
            fp, parent_fp, child_fp = pm.header_fingerprint_args
            clear_header_with_notification(
                pm,
                fp,
                "Main Menu > 2FA Codes",
                parent_fingerprint=parent_fp,
                child_fingerprint=child_fp,
            )
            data = pm.entry_manager.vault.load_index()
            entries = data.get("entries", {})
            totp_list: list[tuple[str, int, int, bool]] = []
            for idx_str, entry in entries.items():
                if pm._entry_type_str(entry) == EntryType.TOTP.value and not entry.get(
                    "archived", entry.get("blacklisted", False)
                ):
                    label = entry.get("label", "")
                    period = int(entry.get("period", 30))
                    imported = "secret" in entry
                    totp_list.append((label, int(idx_str), period, imported))

            if not totp_list:
                pm.notify("No 2FA entries found.", level="WARNING")
                return

            totp_list.sort(key=lambda t: t[0].lower())
            print(colored("Press Enter to return to the menu.", "cyan"))
            while True:
                fp, parent_fp, child_fp = pm.header_fingerprint_args
                clear_header_with_notification(
                    pm,
                    fp,
                    "Main Menu > 2FA Codes",
                    parent_fingerprint=parent_fp,
                    child_fingerprint=child_fp,
                )
                print(colored("Press Enter to return to the menu.", "cyan"))
                generated = [t for t in totp_list if not t[3]]
                imported_list = [t for t in totp_list if t[3]]
                if generated:
                    print(colored("\nGenerated 2FA Codes:", "green"))
                    for label, idx, period, _ in generated:
                        key = getattr(pm, "KEY_TOTP_DET", None) or getattr(
                            pm, "parent_seed", None
                        )
                        code = pm.entry_manager.get_totp_code(idx, key)
                        remaining = pm.entry_manager.get_totp_time_remaining(idx)
                        filled = int(20 * (period - remaining) / period)
                        bar = "[" + "#" * filled + "-" * (20 - filled) + "]"
                        if pm.secret_mode_enabled:
                            if manager_module.copy_to_clipboard(
                                code, pm.clipboard_clear_delay
                            ):
                                print(
                                    f"[{idx}] {label}: [HIDDEN] {bar} {remaining:2d}s - copied to clipboard"
                                )
                        else:
                            print(
                                f"[{idx}] {label}: {color_text(code, 'deterministic')} {bar} {remaining:2d}s"
                            )
                if imported_list:
                    print(colored("\nImported 2FA Codes:", "green"))
                    for label, idx, period, _ in imported_list:
                        key = getattr(pm, "KEY_TOTP_DET", None) or getattr(
                            pm, "parent_seed", None
                        )
                        code = pm.entry_manager.get_totp_code(idx, key)
                        remaining = pm.entry_manager.get_totp_time_remaining(idx)
                        filled = int(20 * (period - remaining) / period)
                        bar = "[" + "#" * filled + "-" * (20 - filled) + "]"
                        if pm.secret_mode_enabled:
                            if manager_module.copy_to_clipboard(
                                code, pm.clipboard_clear_delay
                            ):
                                print(
                                    f"[{idx}] {label}: [HIDDEN] {bar} {remaining:2d}s - copied to clipboard"
                                )
                        else:
                            print(
                                f"[{idx}] {label}: {color_text(code, 'imported')} {bar} {remaining:2d}s"
                            )
                sys.stdout.flush()
                try:
                    user_input = manager_module.timed_input("", 1)
                    if user_input.strip() == "" or user_input.strip().lower() == "b":
                        break
                except TimeoutError:
                    pass
                except KeyboardInterrupt:
                    print()
                    break
        except Exception as e:  # pragma: no cover - defensive
            logging.error(f"Error displaying TOTP codes: {e}", exc_info=True)
            print(colored(f"Error: Failed to display TOTP codes: {e}", "red"))
@@ -113,10 +113,12 @@ class PasswordGenerator:
             self.bip85 = bip85
             self.policy = policy or PasswordPolicy()

-            # Derive seed bytes from parent_seed using BIP39 (handled by EncryptionManager)
-            self.seed_bytes = self.encryption_manager.derive_seed_from_mnemonic(
-                self.parent_seed
-            )
+            if isinstance(parent_seed, (bytes, bytearray)):
+                self.seed_bytes = bytes(parent_seed)
+            else:
+                self.seed_bytes = self.encryption_manager.derive_seed_from_mnemonic(
+                    self.parent_seed
+                )

             logger.debug("PasswordGenerator initialized successfully.")
         except Exception as e:
@@ -126,7 +128,7 @@ class PasswordGenerator:

     def _derive_password_entropy(self, index: int) -> bytes:
         """Derive deterministic entropy for password generation."""
-        entropy = self.bip85.derive_entropy(index=index, bytes_len=64, app_no=32)
+        entropy = self.bip85.derive_entropy(index=index, entropy_bytes=64, app_no=32)
         logger.debug("Entropy derived for password generation.")

         hkdf = HKDF(
@@ -433,7 +435,7 @@ class PasswordGenerator:

 def derive_ssh_key(bip85: BIP85, idx: int) -> bytes:
     """Derive 32 bytes of entropy suitable for an SSH key."""
-    return bip85.derive_entropy(index=idx, bytes_len=32, app_no=32)
+    return bip85.derive_entropy(index=idx, entropy_bytes=32, app_no=32)


 def derive_ssh_key_pair(parent_seed: str, index: int) -> tuple[str, str]:
@@ -499,7 +501,7 @@ def derive_pgp_key(
     import hashlib
     import datetime

-    entropy = bip85.derive_entropy(index=idx, bytes_len=32, app_no=32)
+    entropy = bip85.derive_entropy(index=idx, entropy_bytes=32, app_no=32)
     created = datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc)

     if key_type.lower() == "rsa":
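Since every call site switches from `bytes_len` to `entropy_bytes`, here is a minimal sketch of the renamed keyword in use; the zero-filled master seed is a placeholder for illustration only:

```python
from local_bip85.bip85 import BIP85

bip85 = BIP85(b"\x00" * 64)  # placeholder master seed bytes, not a real secret

# 32 bytes of deterministic entropy, as derive_ssh_key() does above.
ssh_entropy = bip85.derive_entropy(index=0, entropy_bytes=32, app_no=32)
```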
src/seedpass/core/profile_service.py (new file, 108 lines)
@@ -0,0 +1,108 @@
from __future__ import annotations

import logging
from typing import Optional, TYPE_CHECKING

from termcolor import colored

import seedpass.core.manager as manager_module
from nostr.snapshot import MANIFEST_ID_PREFIX

from utils.password_prompt import prompt_existing_password

if TYPE_CHECKING:  # pragma: no cover - typing only
    from .manager import PasswordManager
    from nostr.client import NostrClient


class ProfileService:
    """Profile-related operations for :class:`PasswordManager`."""

    def __init__(self, manager: PasswordManager) -> None:
        self.manager = manager

    def handle_switch_fingerprint(self, *, password: Optional[str] = None) -> bool:
        """Handle switching to a different seed profile."""
        pm = self.manager
        try:
            print(colored("\nAvailable Seed Profiles:", "cyan"))
            fingerprints = pm.fingerprint_manager.list_fingerprints()
            for idx, fp in enumerate(fingerprints, start=1):
                display = (
                    pm.fingerprint_manager.display_name(fp)
                    if hasattr(pm.fingerprint_manager, "display_name")
                    else fp
                )
                print(colored(f"{idx}. {display}", "cyan"))

            choice = input("Select a seed profile by number to switch: ").strip()
            if not choice.isdigit() or not (1 <= int(choice) <= len(fingerprints)):
                print(colored("Invalid selection. Returning to main menu.", "red"))
                return False

            selected_fingerprint = fingerprints[int(choice) - 1]
            pm.fingerprint_manager.current_fingerprint = selected_fingerprint
            pm.current_fingerprint = selected_fingerprint
            if not getattr(pm, "manifest_id", None):
                pm.manifest_id = f"{MANIFEST_ID_PREFIX}{selected_fingerprint}"

            pm.fingerprint_dir = pm.fingerprint_manager.get_current_fingerprint_dir()
            if not pm.fingerprint_dir:
                print(
                    colored(
                        f"Error: Seed profile directory for {selected_fingerprint} not found.",
                        "red",
                    )
                )
                return False

            if password is None:
                password = prompt_existing_password(
                    "Enter the master password for the selected seed profile: "
                )

            if not pm.setup_encryption_manager(
                pm.fingerprint_dir, password, exit_on_fail=False
            ):
                return False

            pm.initialize_bip85()
            pm.initialize_managers()
            pm.start_background_sync()
            print(colored(f"Switched to seed profile {selected_fingerprint}.", "green"))

            try:
                pm.nostr_client = manager_module.NostrClient(
                    encryption_manager=pm.encryption_manager,
                    fingerprint=pm.current_fingerprint,
                    config_manager=getattr(pm, "config_manager", None),
                    parent_seed=getattr(pm, "parent_seed", None),
                )
                if getattr(pm, "manifest_id", None) and hasattr(
                    pm.nostr_client, "_state_lock"
                ):
                    from nostr.backup_models import Manifest

                    with pm.nostr_client._state_lock:
                        pm.nostr_client.current_manifest_id = pm.manifest_id
                        pm.nostr_client.current_manifest = Manifest(
                            ver=1,
                            algo="gzip",
                            chunks=[],
                            delta_since=pm.delta_since or None,
                        )
                logging.info(
                    f"NostrClient re-initialized with seed profile {pm.current_fingerprint}."
                )
            except Exception as e:
                logging.error(f"Failed to re-initialize NostrClient: {e}")
                print(
                    colored(f"Error: Failed to re-initialize NostrClient: {e}", "red")
                )
                return False

            return True
        except Exception as e:  # pragma: no cover - defensive
            logging.error(f"Error during seed profile switching: {e}", exc_info=True)
            print(colored(f"Error: Failed to switch seed profiles: {e}", "red"))
            return False
@@ -4,6 +4,7 @@ from __future__ import annotations

 import sys
 import time
+from typing import Union
 from urllib.parse import quote
 from urllib.parse import urlparse, parse_qs, unquote

@@ -18,13 +19,15 @@ class TotpManager:
     """Helper methods for TOTP secrets and codes."""

     @staticmethod
-    def derive_secret(seed: str, index: int) -> str:
-        """Derive a TOTP secret from a BIP39 seed and index."""
+    def derive_secret(seed: Union[str, bytes], index: int) -> str:
+        """Derive a TOTP secret from a seed or raw key and index."""
         return key_derivation.derive_totp_secret(seed, index)

     @classmethod
-    def current_code(cls, seed: str, index: int, timestamp: int | None = None) -> str:
-        """Return the TOTP code for the given seed and index."""
+    def current_code(
+        cls, seed: Union[str, bytes], index: int, timestamp: int | None = None
+    ) -> str:
+        """Return the TOTP code for the given seed/key and index."""
         secret = cls.derive_secret(seed, index)
         totp = pyotp.TOTP(secret)
         if timestamp is None:
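A short sketch of the widened signature, assuming `TotpManager` has been imported from its module (the path is not shown in this hunk) and that `key_derivation.derive_totp_secret` accepts the raw-key form as the new annotation indicates:

```python
# Either a BIP39 mnemonic string or raw key bytes may be passed now.
mnemonic = (
    "abandon abandon abandon abandon abandon abandon "
    "abandon abandon abandon abandon abandon about"
)
raw_key = b"\x01" * 32  # illustrative raw key material only

code_from_phrase = TotpManager.current_code(mnemonic, 0)
code_from_raw_key = TotpManager.current_code(raw_key, 0)
```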
@@ -14,6 +14,7 @@ from .encryption import (
     USE_ORJSON,
     json_lib,
 )
+from utils.key_derivation import KdfConfig, CURRENT_KDF_VERSION
 from utils.password_prompt import prompt_existing_password


@@ -38,6 +39,11 @@ class Vault:
         """Replace the internal encryption manager."""
         self.encryption_manager = manager

+    def _hkdf_kdf(self) -> KdfConfig:
+        return KdfConfig(
+            name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""
+        )
+
     # ----- Password index helpers -----
     def load_index(self, *, return_migration_flags: bool = False):
         """Return decrypted password index data, applying migrations.
@@ -102,10 +108,24 @@ class Vault:
             )

         try:
-            data = self.encryption_manager.load_json_data(self.index_file)
+            data, kdf = self.encryption_manager.load_json_data(
+                self.index_file, return_kdf=True
+            )
             migration_performed = getattr(
                 self.encryption_manager, "last_migration_performed", False
             )
+            if kdf.version < CURRENT_KDF_VERSION:
+                new_kdf = KdfConfig(
+                    name=kdf.name,
+                    version=CURRENT_KDF_VERSION,
+                    params=kdf.params,
+                    salt_b64=kdf.salt_b64,
+                )
+                self.encryption_manager.save_json_data(
+                    data, self.index_file, kdf=new_kdf
+                )
+                self.encryption_manager.update_checksum(self.index_file)
+                migration_performed = True
         except LegacyFormatRequiresMigrationError:
             print(
                 colored(
@@ -142,7 +162,9 @@ class Vault:
             else:
                 data = json_lib.loads(decrypted.decode("utf-8"))
             if self.encryption_manager._legacy_migrate_flag:
-                self.encryption_manager.save_json_data(data, self.index_file)
+                self.encryption_manager.save_json_data(
+                    data, self.index_file, kdf=self._hkdf_kdf()
+                )
                 self.encryption_manager.update_checksum(self.index_file)
                 migration_performed = getattr(
                     self.encryption_manager, "last_migration_performed", False
@@ -181,7 +203,9 @@ class Vault:
         try:
             data = apply_migrations(data)
             if schema_migrated:
-                self.encryption_manager.save_json_data(data, self.index_file)
+                self.encryption_manager.save_json_data(
+                    data, self.index_file, kdf=self._hkdf_kdf()
+                )
                 self.encryption_manager.update_checksum(self.index_file)
         except Exception as exc:  # noqa: BLE001 - surface clear error and restore
             if legacy_detected and backup_dir is not None:
@@ -214,7 +238,9 @@ class Vault:

     def save_index(self, data: dict) -> None:
         """Encrypt and write password index."""
-        self.encryption_manager.save_json_data(data, self.index_file)
+        self.encryption_manager.save_json_data(
+            data, self.index_file, kdf=self._hkdf_kdf()
+        )

     def get_encrypted_index(self) -> Optional[bytes]:
         """Return the encrypted index bytes if present."""
@@ -252,4 +278,6 @@ class Vault:

     def save_config(self, config: dict) -> None:
         """Encrypt and persist configuration."""
-        self.encryption_manager.save_json_data(config, self.config_file)
+        self.encryption_manager.save_json_data(
+            config, self.config_file, kdf=self._hkdf_kdf()
+        )
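A minimal sketch of the KDF-aware round trip, assuming a throwaway profile directory; `derive_index_key`, `EncryptionManager`, and `Vault` are used the same way the tests further below use them:

```python
import tempfile
from pathlib import Path

from utils.key_derivation import derive_index_key
from seedpass.core.encryption import EncryptionManager
from seedpass.core.vault import Vault

seed_phrase = (
    "abandon abandon abandon abandon abandon abandon "
    "abandon abandon abandon abandon abandon about"
)
profile_dir = Path(tempfile.mkdtemp())  # throwaway directory for this sketch

vault = Vault(EncryptionManager(derive_index_key(seed_phrase), profile_dir), profile_dir)

# save_index now records the HKDF KdfConfig next to the ciphertext, so a later
# load_index can notice an older KDF version and rewrite the file in place.
vault.save_index({"entries": {}})
data = vault.load_index()
```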
src/seedpass/errors.py (new file, 4 lines)
@@ -0,0 +1,4 @@
class VaultLockedError(Exception):
    """Raised when an operation requires an unlocked vault."""

    pass
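The new exception is small enough that a usage sketch is mostly about intent; `vault_is_locked` below is a stand-in flag, not an attribute defined in this diff:

```python
from seedpass.errors import VaultLockedError


def require_unlocked(vault_is_locked: bool) -> None:
    """Guard an operation that needs the vault to be open."""
    if vault_is_locked:
        raise VaultLockedError("Unlock the vault before modifying entries.")


try:
    require_unlocked(True)
except VaultLockedError as exc:
    print(f"Error: {exc}")
```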
@@ -393,7 +393,7 @@ class TotpViewerWindow(toga.Window):
     def refresh_codes(self) -> None:
         self.table.data = []
         for idx, label, *_rest in self.entries.list_entries(
-            filter_kind=EntryType.TOTP.value
+            filter_kinds=[EntryType.TOTP.value]
         ):
             entry = self.entries.retrieve_entry(idx)
             code = self.entries.get_totp_code(idx)
@@ -501,8 +501,10 @@ async def test_generate_password_no_special_chars(client):
             return b"\x00" * 32

     class DummyBIP85:
-        def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
-            return bytes(range(bytes_len))
+        def derive_entropy(
+            self, index: int, entropy_bytes: int, app_no: int = 32
+        ) -> bytes:
+            return bytes(range(entropy_bytes))

     api.app.state.pm.password_generator = PasswordGenerator(
         DummyEnc(), "seed", DummyBIP85()
@@ -529,8 +531,10 @@ async def test_generate_password_allowed_chars(client):
             return b"\x00" * 32

     class DummyBIP85:
-        def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
-            return bytes((index + i) % 256 for i in range(bytes_len))
+        def derive_entropy(
+            self, index: int, entropy_bytes: int, app_no: int = 32
+        ) -> bytes:
+            return bytes((index + i) % 256 for i in range(entropy_bytes))

     api.app.state.pm.password_generator = PasswordGenerator(
         DummyEnc(), "seed", DummyBIP85()
src/tests/test_backup_restore_startup.py (new file, 56 lines)
@@ -0,0 +1,56 @@
import main
from pathlib import Path


def test_cli_flag_restores_before_init(monkeypatch, tmp_path):
    calls = []
    backup = tmp_path / "bak.json"
    backup.write_text("{}")

    def fake_restore(path, fingerprint):
        calls.append(("restore", Path(path), fingerprint))

    class DummyPM:
        def __init__(self, fingerprint=None):
            calls.append(("init", fingerprint))
            self.secret_mode_enabled = True
            self.inactivity_timeout = 0

    monkeypatch.setattr(main, "restore_backup_index", fake_restore)
    monkeypatch.setattr(main, "PasswordManager", DummyPM)
    monkeypatch.setattr(main, "display_menu", lambda pm, **k: None)

    rc = main.main(["--fingerprint", "fp", "--restore-backup", str(backup)])
    assert rc == 0
    assert calls[0][0] == "restore"
    assert calls[1][0] == "init"
    assert calls[0][1] == backup
    assert calls[0][2] == "fp"


def test_menu_option_restores_before_init(monkeypatch, tmp_path):
    calls = []
    backup = tmp_path / "bak.json"
    backup.write_text("{}")

    def fake_restore(path, fingerprint):
        calls.append(("restore", Path(path), fingerprint))

    class DummyPM:
        def __init__(self, fingerprint=None):
            calls.append(("init", fingerprint))
            self.secret_mode_enabled = True
            self.inactivity_timeout = 0

    monkeypatch.setattr(main, "restore_backup_index", fake_restore)
    monkeypatch.setattr(main, "PasswordManager", DummyPM)
    monkeypatch.setattr(main, "display_menu", lambda pm, **k: None)
    inputs = iter(["2", str(backup)])
    monkeypatch.setattr("builtins.input", lambda _prompt="": next(inputs))

    rc = main.main(["--fingerprint", "fp"])
    assert rc == 0
    assert calls[0][0] == "restore"
    assert calls[1][0] == "init"
    assert calls[0][1] == backup
    assert calls[0][2] == "fp"
src/tests/test_bip85_derivation_path.py (new file, 52 lines)
@@ -0,0 +1,52 @@
from local_bip85.bip85 import BIP85


class DummyChild:
    def PrivateKey(self):
        return self

    def Raw(self):
        return self

    def ToBytes(self):
        return b"\x00" * 32


class DummyCtx:
    def __init__(self):
        self.last_path = None

    def DerivePath(self, path: str):
        self.last_path = path
        return DummyChild()


def test_derivation_paths_for_entropy_lengths():
    bip85 = BIP85(b"\x00" * 64)
    ctx = DummyCtx()
    bip85.bip32_ctx = ctx

    vectors = [
        (16, 12),
        (24, 18),
        (32, 24),
    ]

    for entropy_bytes, word_count in vectors:
        bip85.derive_entropy(
            index=0,
            entropy_bytes=entropy_bytes,
            app_no=39,
            word_count=word_count,
        )
        assert ctx.last_path == f"m/83696968'/39'/0'/{word_count}'/0'"


def test_default_word_count_from_entropy_bytes():
    bip85 = BIP85(b"\x00" * 64)
    ctx = DummyCtx()
    bip85.bip32_ctx = ctx

    bip85.derive_entropy(index=5, entropy_bytes=20, app_no=39)

    assert ctx.last_path == "m/83696968'/39'/0'/20'/5'"
src/tests/test_bip85_init.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import sys
from pathlib import Path

sys.path.append(str(Path(__file__).resolve().parents[1]))

from bip_utils import Bip39SeedGenerator
from local_bip85.bip85 import BIP85
from helpers import TEST_SEED

MASTER_XPRV = "xprv9s21ZrQH143K2LBWUUQRFXhucrQqBpKdRRxNVq2zBqsx8HVqFk2uYo8kmbaLLHRdqtQpUm98uKfu3vca1LqdGhUtyoFnCNkfmXRyPXLjbKb"


def test_init_with_seed_bytes():
    seed_bytes = Bip39SeedGenerator(TEST_SEED).Generate()
    bip85 = BIP85(seed_bytes)
    assert isinstance(bip85, BIP85)


def test_init_with_xprv():
    bip85 = BIP85(MASTER_XPRV)
    assert isinstance(bip85, BIP85)
@@ -16,7 +16,7 @@ from seedpass.core.entry_types import EntryType
 class DummyPM:
     def __init__(self):
         self.entry_manager = SimpleNamespace(
-            list_entries=lambda sort_by="index", filter_kind=None, include_archived=False: [
+            list_entries=lambda sort_by="index", filter_kinds=None, include_archived=False: [
                 (1, "Label", "user", "url", False)
             ],
             search_entries=lambda q, kinds=None: [
@@ -21,8 +21,8 @@ class DummyEnc:


 class DummyBIP85:
-    def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
-        return bytes((index + i) % 256 for i in range(bytes_len))
+    def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
+        return bytes((index + i) % 256 for i in range(entropy_bytes))


 def make_manager(tmp_path: Path) -> PasswordManager:
@@ -3,11 +3,15 @@ from pathlib import Path

 from hypothesis import given, strategies as st, settings, HealthCheck
 from mnemonic import Mnemonic
+import hashlib
+import base64
+import os

 from utils.key_derivation import (
     derive_key_from_password,
     derive_key_from_password_argon2,
     derive_index_key,
+    KdfConfig,
 )
 from utils.fingerprint import generate_fingerprint
 from seedpass.core.encryption import EncryptionManager
@@ -36,16 +40,27 @@ def test_fuzz_key_round_trip(password, seed_bytes, config, mode, tmp_path: Path)
     seed_phrase = Mnemonic("english").to_mnemonic(seed_bytes)
     fp = generate_fingerprint(seed_phrase)
     if mode == "argon2":
-        key = derive_key_from_password_argon2(
-            password, fp, time_cost=1, memory_cost=8, parallelism=1
-        )
+        cfg = KdfConfig(
+            params={"time_cost": 1, "memory_cost": 8, "parallelism": 1},
+            salt_b64=base64.b64encode(
+                hashlib.sha256(fp.encode()).digest()[:16]
+            ).decode(),
+        )
+        key = derive_key_from_password_argon2(password, cfg)
     else:
         key = derive_key_from_password(password, fp, iterations=1)
+        cfg = KdfConfig(
+            name="pbkdf2",
+            params={"iterations": 1},
+            salt_b64=base64.b64encode(
+                hashlib.sha256(fp.encode()).digest()[:16]
+            ).decode(),
+        )

     enc_mgr = EncryptionManager(key, tmp_path)

     # Parent seed round trip
-    enc_mgr.encrypt_parent_seed(seed_phrase)
+    enc_mgr.encrypt_parent_seed(seed_phrase, kdf=cfg)
     assert enc_mgr.decrypt_parent_seed() == seed_phrase

     # JSON data round trip
@@ -30,8 +30,8 @@ class DummyEntries:
         self.data = [(1, "Example", None, None, False)]
         self.code = "111111"

-    def list_entries(self, sort_by="index", filter_kind=None, include_archived=False):
-        if filter_kind:
+    def list_entries(self, sort_by="index", filter_kinds=None, include_archived=False):
+        if filter_kinds:
             return [(idx, label, None, None, False) for idx, label, *_ in self.data]
         return self.data

@@ -9,7 +9,7 @@ from seedpass_gui.app import MainWindow


 class DummyEntries:
-    def list_entries(self, sort_by="index", filter_kind=None, include_archived=False):
+    def list_entries(self, sort_by="index", filter_kinds=None, include_archived=False):
         return []

     def search_entries(self, q):
@@ -1,4 +1,6 @@
 import bcrypt
+import hashlib
+import base64
 from pathlib import Path
 from tempfile import TemporaryDirectory
 from types import SimpleNamespace
@@ -7,6 +9,7 @@ from utils.key_derivation import (
     derive_key_from_password,
     derive_key_from_password_argon2,
     derive_index_key,
+    KdfConfig,
 )
 from seedpass.core.encryption import EncryptionManager
 from seedpass.core.vault import Vault
@@ -21,10 +24,24 @@ def _setup_profile(tmp: Path, mode: str):
     argon_kwargs = dict(time_cost=1, memory_cost=8, parallelism=1)
     fp = tmp.name
     if mode == "argon2":
-        seed_key = derive_key_from_password_argon2(TEST_PASSWORD, fp, **argon_kwargs)
+        cfg = KdfConfig(
+            params=argon_kwargs,
+            salt_b64=base64.b64encode(
+                hashlib.sha256(fp.encode()).digest()[:16]
+            ).decode(),
+        )
+        seed_key = derive_key_from_password_argon2(TEST_PASSWORD, cfg)
+        EncryptionManager(seed_key, tmp).encrypt_parent_seed(TEST_SEED, kdf=cfg)
     else:
         seed_key = derive_key_from_password(TEST_PASSWORD, fp, iterations=1)
-    EncryptionManager(seed_key, tmp).encrypt_parent_seed(TEST_SEED)
+        cfg = KdfConfig(
+            name="pbkdf2",
+            params={"iterations": 1},
+            salt_b64=base64.b64encode(
+                hashlib.sha256(fp.encode()).digest()[:16]
+            ).decode(),
+        )
+        EncryptionManager(seed_key, tmp).encrypt_parent_seed(TEST_SEED, kdf=cfg)

     index_key = derive_index_key(TEST_SEED)
     enc_mgr = EncryptionManager(index_key, tmp)
@@ -65,9 +82,9 @@ def test_setup_encryption_manager_kdf_modes(monkeypatch):
     )
     if mode == "argon2":
         monkeypatch.setattr(
-            "seedpass.core.manager.derive_key_from_password_argon2",
-            lambda pw, fp: derive_key_from_password_argon2(
-                pw, fp, **argon_kwargs
+            "seedpass.core.manager.KdfConfig",
+            lambda salt_b64, **_: KdfConfig(
+                params=argon_kwargs, salt_b64=salt_b64
             ),
         )
     monkeypatch.setattr(PasswordManager, "initialize_bip85", lambda self: None)
@@ -76,3 +93,26 @@ def test_setup_encryption_manager_kdf_modes(monkeypatch):
     )
     assert pm.setup_encryption_manager(path, exit_on_fail=False)
     assert pm.parent_seed == TEST_SEED
+
+
+def test_kdf_param_round_trip(tmp_path):
+    cfg = KdfConfig(
+        params={"time_cost": 3, "memory_cost": 32, "parallelism": 1},
+        salt_b64=base64.b64encode(b"static-salt-1234").decode(),
+    )
+    key = derive_key_from_password_argon2(TEST_PASSWORD, cfg)
+    mgr = EncryptionManager(key, tmp_path)
+    mgr.encrypt_parent_seed(TEST_SEED, kdf=cfg)
+    stored = mgr.get_file_kdf(Path("parent_seed.enc"))
+    assert stored.params == cfg.params
+
+
+def test_vault_kdf_migration(tmp_path):
+    index_key = derive_index_key(TEST_SEED)
+    mgr = EncryptionManager(index_key, tmp_path)
+    vault = Vault(mgr, tmp_path)
+    old_kdf = KdfConfig(name="hkdf", version=0, params={}, salt_b64="")
+    mgr.save_json_data({"entries": {}}, vault.index_file, kdf=old_kdf)
+    vault.load_index()
+    new_kdf = mgr.get_file_kdf(vault.index_file)
+    assert new_kdf.version == KdfConfig().version
@@ -1,11 +1,15 @@
 import logging
 import pytest
+import logging
+import hashlib
+import base64
 from utils.fingerprint import generate_fingerprint
 from utils.key_derivation import (
     derive_key_from_password,
     derive_key_from_password_argon2,
     derive_index_key_seed_only,
     derive_index_key,
+    KdfConfig,
 )


@@ -48,15 +52,17 @@ def test_argon2_fingerprint_affects_key():
     fp1 = generate_fingerprint("seed one")
     fp2 = generate_fingerprint("seed two")

-    k1 = derive_key_from_password_argon2(
-        password, fp1, time_cost=1, memory_cost=8, parallelism=1
-    )
-    k2 = derive_key_from_password_argon2(
-        password, fp1, time_cost=1, memory_cost=8, parallelism=1
-    )
-    k3 = derive_key_from_password_argon2(
-        password, fp2, time_cost=1, memory_cost=8, parallelism=1
-    )
+    cfg1 = KdfConfig(
+        params={"time_cost": 1, "memory_cost": 8, "parallelism": 1},
+        salt_b64=base64.b64encode(hashlib.sha256(fp1.encode()).digest()[:16]).decode(),
+    )
+    cfg2 = KdfConfig(
+        params={"time_cost": 1, "memory_cost": 8, "parallelism": 1},
+        salt_b64=base64.b64encode(hashlib.sha256(fp2.encode()).digest()[:16]).decode(),
+    )
+    k1 = derive_key_from_password_argon2(password, cfg1)
+    k2 = derive_key_from_password_argon2(password, cfg1)
+    k3 = derive_key_from_password_argon2(password, cfg2)

     assert k1 == k2
     assert k1 != k3
src/tests/test_key_hierarchy.py (new file, 19 lines)
@@ -0,0 +1,19 @@
import base64
from bip_utils import Bip39SeedGenerator
from utils.key_hierarchy import kd
from utils.key_derivation import derive_index_key


def test_kd_distinct_infos():
    root = b"root" * 8
    k1 = kd(root, b"info1")
    k2 = kd(root, b"info2")
    assert k1 != k2


def test_derive_index_key_matches_hierarchy():
    seed = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
    seed_bytes = Bip39SeedGenerator(seed).Generate()
    master = kd(seed_bytes, b"seedpass:v1:master")
    expected = base64.urlsafe_b64encode(kd(master, b"seedpass:v1:storage"))
    assert derive_index_key(seed) == expected
@@ -37,10 +37,30 @@ def test_add_and_modify_key_value():
         "tags": [],
     }

+    # Appears in listing
+    assert em.list_entries() == [(idx, "API entry", None, None, False)]
+
+    # Modify key and value
     em.modify_entry(idx, key="api_key2", value="def456")
     updated = em.retrieve_entry(idx)
     assert updated["key"] == "api_key2"
     assert updated["value"] == "def456"

+    # Archive and ensure it disappears from the default listing
+    em.archive_entry(idx)
+    archived = em.retrieve_entry(idx)
+    assert archived["archived"] is True
+    assert em.list_entries() == []
+    assert em.list_entries(include_archived=True) == [
+        (idx, "API entry", None, None, True)
+    ]
+
+    # Restore and ensure it reappears
+    em.restore_entry(idx)
+    restored = em.retrieve_entry(idx)
+    assert restored["archived"] is False
+    assert em.list_entries() == [(idx, "API entry", None, None, False)]
+
+    # Values are not searchable
     results = em.search_entries("def456")
     assert results == []
@@ -1,4 +1,5 @@
 import json
+import base64
 import hashlib
 from pathlib import Path

@@ -99,7 +100,8 @@ def test_migrated_index_has_v2_prefix(monkeypatch, tmp_path: Path):
     vault.load_index()

     new_file = tmp_path / "seedpass_entries_db.json.enc"
-    assert new_file.read_bytes().startswith(b"V2:")
+    payload = json.loads(new_file.read_text())
+    assert base64.b64decode(payload["ct"]).startswith(b"V2:")
     assert vault.migrated_from_legacy

@@ -66,5 +66,5 @@ def test_migrate_iterations(tmp_path, monkeypatch, iterations):
     cfg = ConfigManager(vault, tmp_path)
     assert cfg.get_kdf_iterations() == iterations

-    content = (tmp_path / "seedpass_entries_db.json.enc").read_bytes()
-    assert content.startswith(b"V2:")
+    payload = json.loads((tmp_path / "seedpass_entries_db.json.enc").read_text())
+    assert base64.b64decode(payload["ct"]).startswith(b"V2:")
@@ -50,6 +50,6 @@ def test_migrate_legacy_sets_flag(tmp_path, monkeypatch):
     monkeypatch.setattr(vault_module, "prompt_existing_password", lambda _: password)
     monkeypatch.setattr("builtins.input", lambda _: "2")
     vault.load_index()
-    content = (tmp_path / "seedpass_entries_db.json.enc").read_bytes()
-    assert content.startswith(b"V2:")
+    payload = json.loads((tmp_path / "seedpass_entries_db.json.enc").read_text())
+    assert base64.b64decode(payload["ct"]).startswith(b"V2:")
     assert vault.encryption_manager.last_migration_performed is True
@@ -1,4 +1,5 @@
 import json
+import base64
 import hashlib
 from pathlib import Path
 from types import SimpleNamespace
@@ -34,7 +35,8 @@ def test_legacy_migration_second_session(monkeypatch, tmp_path: Path) -> None:
     monkeypatch.setattr("builtins.input", lambda *_a, **_k: "y")
     vault.load_index()
     new_file = fp_dir / "seedpass_entries_db.json.enc"
-    assert new_file.read_bytes().startswith(b"V2:")
+    payload = json.loads(new_file.read_text())
+    assert base64.b64decode(payload["ct"]).startswith(b"V2:")

     new_enc_mgr = EncryptionManager(key, fp_dir)
     new_vault = Vault(new_enc_mgr, fp_dir)
@@ -59,4 +61,5 @@ def test_legacy_migration_second_session(monkeypatch, tmp_path: Path) -> None:
     )

     pm.initialize_managers()
-    assert new_file.read_bytes().startswith(b"V2:")
+    payload = json.loads(new_file.read_text())
+    assert base64.b64decode(payload["ct"]).startswith(b"V2:")
src/tests/test_list_entries_all_types.py (new file, 85 lines)
@@ -0,0 +1,85 @@
from __future__ import annotations

from pathlib import Path
from tempfile import TemporaryDirectory
from types import SimpleNamespace

from typer.testing import CliRunner

from seedpass.cli import app as cli_app
from seedpass.cli import entry as entry_cli
from helpers import create_vault, TEST_SEED, TEST_PASSWORD
from seedpass.core.backup import BackupManager
from seedpass.core.config_manager import ConfigManager
from seedpass.core.entry_management import EntryManager
from seedpass.core.manager import PasswordManager, EncryptionMode


def _setup_manager(tmp_path: Path) -> tuple[PasswordManager, EntryManager]:
    vault, enc_mgr = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
    cfg_mgr = ConfigManager(vault, tmp_path)
    backup_mgr = BackupManager(tmp_path, cfg_mgr)
    entry_mgr = EntryManager(vault, backup_mgr)

    pm = PasswordManager.__new__(PasswordManager)
    pm.encryption_mode = EncryptionMode.SEED_ONLY
    pm.encryption_manager = enc_mgr
    pm.vault = vault
    pm.entry_manager = entry_mgr
    pm.backup_manager = backup_mgr
    pm.parent_seed = TEST_SEED
    pm.nostr_client = SimpleNamespace()
    pm.fingerprint_dir = tmp_path
    pm.secret_mode_enabled = False
    return pm, entry_mgr


def _create_all_entries(em: EntryManager) -> None:
    em.add_entry("pw", 8)
    em.add_totp("totp", TEST_SEED)
    em.add_ssh_key("ssh", TEST_SEED)
    em.add_seed("seed", TEST_SEED, words_num=12)
    em.add_nostr_key("nostr", TEST_SEED)
    em.add_pgp_key("pgp", TEST_SEED)
    em.add_key_value("kv", "k", "v")
    em.add_managed_account("acct", TEST_SEED)


def test_cli_list_all_types(monkeypatch):
    with TemporaryDirectory() as tmpdir:
        tmp_path = Path(tmpdir)
        pm, em = _setup_manager(tmp_path)
        _create_all_entries(em)

        def fake_get_entry_service(_ctx):
            return SimpleNamespace(
                list_entries=lambda sort_by, filter_kinds, include_archived: pm.entry_manager.list_entries(
                    sort_by=sort_by,
                    filter_kinds=filter_kinds,
                    include_archived=include_archived,
                )
            )

        monkeypatch.setattr(entry_cli, "_get_entry_service", fake_get_entry_service)

        runner = CliRunner()
        result = runner.invoke(cli_app, ["entry", "list"])
        assert result.exit_code == 0
        out = result.stdout
        for label in ["pw", "totp", "ssh", "seed", "nostr", "pgp", "kv", "acct"]:
            assert label in out


def test_menu_list_all_types(monkeypatch, capsys):
    with TemporaryDirectory() as tmpdir:
        tmp_path = Path(tmpdir)
        pm, em = _setup_manager(tmp_path)
        _create_all_entries(em)

        inputs = iter(["1", "", ""])  # choose All then exit
        monkeypatch.setattr("builtins.input", lambda *_: next(inputs))

        pm.handle_list_entries()
        out = capsys.readouterr().out
        for label in ["pw", "totp", "ssh", "seed", "nostr", "pgp", "kv", "acct"]:
            assert label in out
@@ -57,5 +57,5 @@ def test_filter_by_type():
         em = setup_entry_manager(tmp_path)
         em.add_entry("site", 8, "user")
         em.add_totp("Example", TEST_SEED)
-        result = em.list_entries(filter_kind=EntryType.TOTP.value)
+        result = em.list_entries(filter_kinds=[EntryType.TOTP.value])
         assert result == [(1, "Example", None, None, False)]
@@ -41,6 +41,9 @@ def test_add_and_get_managed_account_seed():
         assert fp
         assert (tmp_path / "accounts" / fp).exists()
 
+        # Appears in listing
+        assert mgr.list_entries() == [(idx, "acct", None, None, False)]
+
         phrase_a = mgr.get_managed_account_seed(idx, TEST_SEED)
         phrase_b = mgr.get_managed_account_seed(idx, TEST_SEED)
         assert phrase_a == phrase_b
@@ -51,6 +54,23 @@ def test_add_and_get_managed_account_seed():
         assert phrase_a == expected
         assert generate_fingerprint(phrase_a) == fp
+
+        # Archive and ensure it disappears from default listing
+        mgr.archive_entry(idx)
+        archived = mgr.retrieve_entry(idx)
+        assert archived["archived"] is True
+        assert mgr.list_entries() == []
+        assert mgr.list_entries(include_archived=True) == [
+            (idx, "acct", None, None, True)
+        ]
+
+        # Restore and ensure deterministic derivation is unchanged
+        mgr.restore_entry(idx)
+        restored = mgr.retrieve_entry(idx)
+        assert restored["archived"] is False
+        assert mgr.list_entries() == [(idx, "acct", None, None, False)]
+        phrase_c = mgr.get_managed_account_seed(idx, TEST_SEED)
+        assert phrase_c == expected
 
 
 def test_load_and_exit_managed_account(monkeypatch):
     with TemporaryDirectory() as tmpdir:
@@ -13,8 +13,8 @@ class DummyEnc:
 
 
 class DummyBIP85:
-    def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
-        return bytes((index + i) % 256 for i in range(bytes_len))
+    def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
+        return bytes((index + i) % 256 for i in range(entropy_bytes))
 
 
 def make_generator(policy=None):
@@ -8,8 +8,8 @@ class DummyEnc:
 
 
 class DummyBIP85:
-    def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
-        return bytes((index + i) % 256 for i in range(bytes_len))
+    def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
+        return bytes((index + i) % 256 for i in range(entropy_bytes))
 
 
 def make_generator():
@@ -14,8 +14,8 @@ class DummyEnc:
 
 
 class DummyBIP85:
-    def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
-        return bytes((index + i) % 256 for i in range(bytes_len))
+    def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
+        return bytes((index + i) % 256 for i in range(entropy_bytes))
 
 
 def make_generator():
@@ -15,8 +15,8 @@ class DummyEnc:
 
 
 class DummyBIP85:
-    def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
-        return bytes((index + i) % 256 for i in range(bytes_len))
+    def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
+        return bytes((index + i) % 256 for i in range(entropy_bytes))
 
 
 def make_generator():
@@ -12,8 +12,8 @@ class DummyEnc:
 
 
 class DummyBIP85:
-    def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
-        return bytes((index + i) % 256 for i in range(bytes_len))
+    def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
+        return bytes((index + i) % 256 for i in range(entropy_bytes))
 
 
 def make_generator():
@@ -15,8 +15,8 @@ class DummyEnc:
 
 
 class DummyBIP85:
-    def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
-        return bytes((index + i) % 256 for i in range(bytes_len))
+    def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
+        return bytes((index + i) % 256 for i in range(entropy_bytes))
 
 
 def make_generator(policy=None):
@@ -14,8 +14,8 @@ class DummyEnc:
 
 
 class DummyBIP85:
-    def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
-        return bytes((index + i) % 256 for i in range(bytes_len))
+    def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 32) -> bytes:
+        return bytes((index + i) % 256 for i in range(entropy_bytes))
 
 
 def make_generator(policy=None):
@@ -1,4 +1,6 @@
 import sys
+import json
+import base64
 from pathlib import Path
 from cryptography.fernet import Fernet
 
@@ -28,4 +30,5 @@ def test_parent_seed_migrates_from_fernet(tmp_path: Path) -> None:
 
     assert new_file.exists()
     assert new_file.read_bytes() != encrypted
-    assert new_file.read_bytes().startswith(b"V2:")
+    payload = json.loads(new_file.read_text())
+    assert base64.b64decode(payload["ct"]).startswith(b"V2:")
93  src/tests/test_seed_word_by_word_flow.py  Normal file
@@ -0,0 +1,93 @@
+import builtins
+from types import SimpleNamespace
+
+import pytest
+
+import seedpass.core.manager as manager_module
+from helpers import TEST_SEED
+from utils import seed_prompt
+
+
+def test_prompt_seed_words_confirmation_loop(monkeypatch):
+    phrase = TEST_SEED
+    words = phrase.split()
+    inputs = iter(words + [words[2]])
+    confirmations = iter(["y", "y", "n", "y"] + ["y"] * (len(words) - 3))
+
+    monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(inputs))
+    monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
+    monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
+    monkeypatch.setattr(builtins, "input", lambda *_: next(confirmations))
+
+    result = seed_prompt.prompt_seed_words(len(words))
+    assert result == phrase
+
+
+def test_prompt_seed_words_invalid_word(monkeypatch):
+    phrase = TEST_SEED
+    words = phrase.split()
+    inputs = iter(["invalid"] + words)
+    confirmations = iter(["y"] * len(words))
+
+    monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(inputs))
+    monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
+    monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
+    monkeypatch.setattr(builtins, "input", lambda *_: next(confirmations))
+
+    result = seed_prompt.prompt_seed_words(len(words))
+    assert result == phrase
+
+
+def test_add_new_fingerprint_words_flow_success(monkeypatch):
+    pm = manager_module.PasswordManager.__new__(manager_module.PasswordManager)
+    pm.fingerprint_manager = SimpleNamespace(current_fingerprint=None)
+    pm.initialize_managers = lambda: None
+
+    phrase = TEST_SEED
+    words = phrase.split()
+    word_iter = iter(words)
+    inputs = iter(["2"] + ["y"] * len(words))
+
+    monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(word_iter))
+    monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
+    monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
+    monkeypatch.setattr(builtins, "input", lambda *_: next(inputs))
+
+    captured = {}
+
+    def finalize(self, seed, password=None):
+        captured["seed"] = seed
+        self.parent_seed = seed
+        return "fp"
+
+    monkeypatch.setattr(
+        manager_module.PasswordManager, "_finalize_existing_seed", finalize
+    )
+
+    result = pm.add_new_fingerprint()
+
+    assert result == "fp"
+    assert pm.fingerprint_manager.current_fingerprint == "fp"
+    assert captured["seed"] == phrase
+    assert pm.parent_seed == phrase
+
+
+def test_add_new_fingerprint_words_flow_invalid_phrase(monkeypatch):
+    pm = manager_module.PasswordManager.__new__(manager_module.PasswordManager)
+    pm.fingerprint_manager = SimpleNamespace(current_fingerprint=None)
+    pm.initialize_managers = lambda: None
+
+    words = ["abandon"] * 12
+    word_iter = iter(words)
+    inputs = iter(["2"] + ["y"] * len(words))
+
+    monkeypatch.setattr(seed_prompt, "masked_input", lambda *_: next(word_iter))
+    monkeypatch.setattr(seed_prompt, "_apply_backoff", lambda *_a, **_k: None)
+    monkeypatch.setattr(seed_prompt, "clear_screen", lambda *_a, **_k: None)
+    monkeypatch.setattr(builtins, "input", lambda *_: next(inputs))
+
+    with pytest.raises(SystemExit):
+        pm.add_new_fingerprint()
+
+    assert pm.fingerprint_manager.current_fingerprint is None
+    assert not hasattr(pm, "parent_seed")
134  src/tests/test_service_classes.py  Normal file
@@ -0,0 +1,134 @@
+from tempfile import TemporaryDirectory
+from types import SimpleNamespace
+from pathlib import Path
+
+import pytest
+
+from helpers import create_vault, TEST_SEED, TEST_PASSWORD, dummy_nostr_client
+from seedpass.core.entry_management import EntryManager
+from seedpass.core.backup import BackupManager
+from seedpass.core.manager import PasswordManager, EncryptionMode
+from seedpass.core.config_manager import ConfigManager
+from seedpass.core.entry_service import EntryService
+from seedpass.core.profile_service import ProfileService
+from constants import DEFAULT_PASSWORD_LENGTH
+
+
+class FakePasswordGenerator:
+    def generate_password(self, length: int, index: int) -> str:
+        return f"pw-{index}-{length}"
+
+
+def _setup_pm(tmp_path: Path, client) -> PasswordManager:
+    vault, enc_mgr = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
+    cfg_mgr = ConfigManager(vault, tmp_path)
+    backup_mgr = BackupManager(tmp_path, cfg_mgr)
+    entry_mgr = EntryManager(vault, backup_mgr)
+
+    pm = PasswordManager.__new__(PasswordManager)
+    pm.encryption_mode = EncryptionMode.SEED_ONLY
+    pm.encryption_manager = enc_mgr
+    pm.vault = vault
+    pm.entry_manager = entry_mgr
+    pm.backup_manager = backup_mgr
+    pm.password_generator = FakePasswordGenerator()
+    pm.parent_seed = TEST_SEED
+    pm.nostr_client = client
+    pm.fingerprint_dir = tmp_path
+    pm.secret_mode_enabled = False
+    pm.is_dirty = False
+    return pm
+
+
+def test_entry_service_add_password(monkeypatch, dummy_nostr_client, capsys):
+    client, _relay = dummy_nostr_client
+    with TemporaryDirectory() as tmpdir:
+        pm = _setup_pm(Path(tmpdir), client)
+        service = EntryService(pm)
+        inputs = iter(
+            [
+                "a",
+                "Example",
+                "",
+                "",
+                "",
+                "",
+                "n",
+                "",
+                "",
+                "",
+                "",
+                "",
+                "",
+                "",
+                "",
+                "",
+            ]
+        )
+        monkeypatch.setattr("builtins.input", lambda *a, **k: next(inputs))
+        monkeypatch.setattr("seedpass.core.entry_service.pause", lambda *a, **k: None)
+        monkeypatch.setattr(pm, "start_background_vault_sync", lambda *a, **k: None)
+
+        service.handle_add_password()
+        out = capsys.readouterr().out
+        entries = pm.entry_manager.list_entries(verbose=False)
+        assert entries == [(0, "Example", "", "", False)]
+        assert f"pw-0-{DEFAULT_PASSWORD_LENGTH}" in out
+
+
+def test_menu_handler_list_entries(monkeypatch, capsys):
+    with TemporaryDirectory() as tmpdir:
+        pm = _setup_pm(Path(tmpdir), SimpleNamespace())
+        pm.entry_manager.add_totp("Example", TEST_SEED)
+        pm.entry_manager.add_entry("example.com", 12)
+        pm.entry_manager.add_key_value("API entry", "api", "abc123")
+        pm.entry_manager.add_managed_account("acct", TEST_SEED)
+        inputs = iter(["1", ""])  # list all then exit
+        monkeypatch.setattr("builtins.input", lambda *_: next(inputs))
+        pm.menu_handler.handle_list_entries()
+        out = capsys.readouterr().out
+        assert "Example" in out
+        assert "example.com" in out
+        assert "API" in out
+        assert "acct" in out
+
+
+def test_profile_service_switch(monkeypatch):
+    class DummyFingerprintManager:
+        def __init__(self):
+            self.fingerprints = ["fp1", "fp2"]
+            self.current_fingerprint = "fp1"
+
+        def list_fingerprints(self):
+            return self.fingerprints
+
+        def display_name(self, fp):
+            return fp
+
+        def get_current_fingerprint_dir(self):
+            return Path(".")
+
+    pm = PasswordManager.__new__(PasswordManager)
+    pm.fingerprint_manager = DummyFingerprintManager()
+    pm.current_fingerprint = "fp1"
+    pm.setup_encryption_manager = lambda *a, **k: True
+    pm.initialize_bip85 = lambda *a, **k: None
+    pm.initialize_managers = lambda *a, **k: None
+    pm.start_background_sync = lambda *a, **k: None
+    pm.nostr_client = SimpleNamespace()
+    pm.manifest_id = None
+    pm.delta_since = None
+    pm.encryption_manager = SimpleNamespace()
+    pm.parent_seed = TEST_SEED
+
+    service = ProfileService(pm)
+    monkeypatch.setattr("builtins.input", lambda *_: "2")
+    monkeypatch.setattr(
+        "seedpass.core.profile_service.prompt_existing_password", lambda *_: "pw"
+    )
+    monkeypatch.setattr(
+        "seedpass.core.manager.NostrClient", lambda *a, **k: SimpleNamespace()
+    )
+
+    assert service.handle_switch_fingerprint() is True
+    assert pm.current_fingerprint == "fp2"
@@ -18,8 +18,8 @@ runner = CliRunner()
 def test_entry_list(monkeypatch):
     called = {}
 
-    def list_entries(sort_by="index", filter_kind=None, include_archived=False):
-        called["args"] = (sort_by, filter_kind, include_archived)
+    def list_entries(sort_by="index", filter_kinds=None, include_archived=False):
+        called["args"] = (sort_by, filter_kinds, include_archived)
         return [(0, "Site", "user", "", False)]
 
     pm = SimpleNamespace(
52  src/tests/test_vault_lock_flag.py  Normal file
@@ -0,0 +1,52 @@
+import pytest
+from types import SimpleNamespace
+
+from seedpass.core.manager import PasswordManager
+from seedpass.errors import VaultLockedError
+
+
+class DummyEntryManager:
+    def __init__(self):
+        self.cleared = False
+
+    def clear_cache(self):
+        self.cleared = True
+
+
+def test_lock_vault_sets_flag_and_keeps_objects():
+    pm = PasswordManager.__new__(PasswordManager)
+    em = DummyEntryManager()
+    pm.entry_manager = em
+    pm.is_locked = False
+    pm.locked = False
+    pm.lock_vault()
+    assert pm.is_locked
+    assert pm.locked
+    assert pm.entry_manager is em
+    assert em.cleared
+
+
+def test_entry_service_requires_unlocked():
+    pm = PasswordManager.__new__(PasswordManager)
+    service = SimpleNamespace()
+    pm._entry_service = service
+    pm.is_locked = True
+    with pytest.raises(VaultLockedError):
+        _ = pm.entry_service
+    pm.is_locked = False
+    assert pm.entry_service is service
+
+
+def test_unlock_vault_clears_locked_flag(tmp_path):
+    pm = PasswordManager.__new__(PasswordManager)
+    pm.fingerprint_dir = tmp_path
+    pm.parent_seed = "seed"
+    pm.setup_encryption_manager = lambda *a, **k: None
+    pm.initialize_bip85 = lambda: None
+    pm.initialize_managers = lambda: None
+    pm.update_activity = lambda: None
+    pm.is_locked = True
+    pm.locked = True
+    pm.unlock_vault("pw")
+    assert not pm.is_locked
+    assert not pm.locked
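The new test file above pins down the lock/unlock contract: locking only flips a flag and clears caches, while service accessors refuse to run until the vault is unlocked. A minimal sketch of that guard pattern (an illustration only, not SeedPass's actual `PasswordManager`):

```python
class VaultLockedError(Exception):
    """Raised when a locked vault's services are accessed."""


class TinyManager:
    def __init__(self, entry_service: object) -> None:
        self.is_locked = False
        self._entry_service = entry_service

    def lock_vault(self) -> None:
        # Keep the underlying objects around; only flip the flag (and clear caches).
        self.is_locked = True

    @property
    def entry_service(self) -> object:
        if self.is_locked:
            raise VaultLockedError("unlock the vault first")
        return self._entry_service
```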
@@ -3,15 +3,13 @@
 """
 Key Derivation Module
 
-Never ever ever use or suggest to use Random Salt. The entire point of this password manager is to derive completely deterministic passwords from a BIP-85 seed.
-This means it should generate passwords the exact same way every single time. Salts would break this functionality and is not appropriate for this software's use case.
+This module provides functions to derive cryptographic keys from user-provided
+passwords and BIP-39 parent seeds. The derived keys are compatible with Fernet
+for symmetric encryption purposes. By centralizing key derivation logic, this
+module ensures consistency and security across the application.
 
-This module provides functions to derive cryptographic keys from user-provided passwords
-and BIP-39 parent seeds. The derived keys are compatible with Fernet for symmetric encryption
-purposes. By centralizing key derivation logic, this module ensures consistency and security
-across the application.
-
-Ensure that all dependencies are installed and properly configured in your environment.
+Ensure that all dependencies are installed and properly configured in your
+environment.
 """
 
 import os
@@ -21,9 +19,13 @@ import unicodedata
 import logging
 import hmac
 import time
+from dataclasses import dataclass, field
 from enum import Enum
-from typing import Optional, Union
+from typing import Optional, Union, Dict, Any
 
 from bip_utils import Bip39SeedGenerator
+from local_bip85 import BIP85
+from .key_hierarchy import kd
+
 from cryptography.hazmat.primitives.kdf.hkdf import HKDF
 from cryptography.hazmat.primitives import hashes
@@ -45,6 +47,27 @@ DEFAULT_ENCRYPTION_MODE = EncryptionMode.SEED_ONLY
 TOTP_PURPOSE = 39
 
 
+@dataclass
+class KdfConfig:
+    """Configuration block describing how a key was derived."""
+
+    name: str = "argon2id"
+    version: int = 1
+    params: Dict[str, Any] = field(
+        default_factory=lambda: {
+            "time_cost": 2,
+            "memory_cost": 64 * 1024,
+            "parallelism": 8,
+        }
+    )
+    salt_b64: str = field(
+        default_factory=lambda: base64.b64encode(os.urandom(16)).decode()
+    )
+
+
+CURRENT_KDF_VERSION = 1
+
+
 def derive_key_from_password(
     password: str, fingerprint: Union[str, bytes], iterations: int = 100_000
 ) -> bytes:
@@ -107,18 +130,15 @@ def derive_key_from_password(
         raise
 
 
-def derive_key_from_password_argon2(
-    password: str,
-    fingerprint: Union[str, bytes],
-    *,
-    time_cost: int = 2,
-    memory_cost: int = 64 * 1024,
-    parallelism: int = 8,
-) -> bytes:
+def derive_key_from_password_argon2(password: str, kdf: KdfConfig) -> bytes:
     """Derive an encryption key from a password using Argon2id.
 
-    The defaults follow recommended parameters but omit a salt for deterministic
-    output. Smaller values may be supplied for testing.
+    Parameters
+    ----------
+    password:
+        The user's password.
+    kdf:
+        :class:`KdfConfig` instance describing salt and tuning parameters.
     """
 
     if not password:
@@ -129,17 +149,14 @@ def derive_key_from_password_argon2(
     try:
         from argon2.low_level import hash_secret_raw, Type
 
-        if isinstance(fingerprint, bytes):
-            salt = fingerprint
-        else:
-            salt = hashlib.sha256(fingerprint.encode()).digest()[:16]
-
+        params = kdf.params or {}
+        salt = base64.b64decode(kdf.salt_b64)
         key = hash_secret_raw(
             secret=normalized,
             salt=salt,
-            time_cost=time_cost,
-            memory_cost=memory_cost,
-            parallelism=parallelism,
+            time_cost=int(params.get("time_cost", 2)),
+            memory_cost=int(params.get("memory_cost", 64 * 1024)),
+            parallelism=int(params.get("parallelism", 8)),
             hash_len=32,
             type=Type.ID,
         )
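The Argon2 changes above replace per-fingerprint salts with a persisted `KdfConfig` (salt plus tuning parameters), so the same stored configuration always reproduces the same key. A minimal sketch of that property using the same `argon2.low_level` primitive the hunk calls; the small parameter values here are demo-sized, not SeedPass defaults:

```python
import os
from argon2.low_level import Type, hash_secret_raw

salt = os.urandom(16)  # generated once, then stored (e.g. base64-encoded in the config block)
params = {"time_cost": 1, "memory_cost": 8, "parallelism": 1}  # demo-sized values


def derive(password: str) -> bytes:
    # Same password + same stored salt/params -> same 32-byte key on every run.
    return hash_secret_raw(
        secret=password.encode(),
        salt=salt,
        time_cost=params["time_cost"],
        memory_cost=params["memory_cost"],
        parallelism=params["parallelism"],
        hash_len=32,
        type=Type.ID,
    )


assert derive("hunter2") == derive("hunter2")
```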
@@ -192,16 +209,10 @@ def derive_key_from_parent_seed(parent_seed: str, fingerprint: str = None) -> by
 
 
 def derive_index_key_seed_only(seed: str) -> bytes:
-    """Derive a deterministic Fernet key from only the BIP-39 seed."""
+    """Derive the index encryption key using the v1 hierarchy."""
     seed_bytes = Bip39SeedGenerator(seed).Generate()
-    hkdf = HKDF(
-        algorithm=hashes.SHA256(),
-        length=32,
-        salt=None,
-        info=b"password-db",
-        backend=default_backend(),
-    )
-    key = hkdf.derive(seed_bytes)
+    master = kd(seed_bytes, b"seedpass:v1:master")
+    key = kd(master, b"seedpass:v1:storage")
     return base64.urlsafe_b64encode(key)
 
 
@@ -210,25 +221,21 @@ def derive_index_key(seed: str) -> bytes:
     return derive_index_key_seed_only(seed)
 
 
-def derive_totp_secret(seed: str, index: int) -> str:
-    """Derive a base32-encoded TOTP secret from a BIP39 seed."""
+def derive_totp_secret(seed: Union[str, bytes], index: int) -> str:
+    """Derive a base32-encoded TOTP secret from a seed or raw key."""
     try:
-        from local_bip85 import BIP85
-
-        # Initialize BIP85 from the BIP39 seed bytes
-        seed_bytes = Bip39SeedGenerator(seed).Generate()
+        if isinstance(seed, (bytes, bytearray)):
+            seed_bytes = bytes(seed)
+        else:
+            seed_bytes = Bip39SeedGenerator(seed).Generate()
         bip85 = BIP85(seed_bytes)
 
-        # Build the BIP32 path m/83696968'/39'/TOTP'/{index}'
         totp_int = int.from_bytes(b"TOTP", "big")
         path = f"m/83696968'/{TOTP_PURPOSE}'/{totp_int}'/{index}'"
-
-        # Derive entropy using the same scheme as BIP85
         child_key = bip85.bip32_ctx.DerivePath(path)
         key_bytes = child_key.PrivateKey().Raw().ToBytes()
         entropy = hmac.new(b"bip-entropy-from-k", key_bytes, hashlib.sha512).digest()
 
-        # Hash the first 32 bytes of entropy and encode the first 20 bytes
         hashed = hashlib.sha256(entropy[:32]).digest()
         secret = base64.b32encode(hashed[:20]).decode("utf-8")
         logger.debug(f"Derived TOTP secret for index {index}.")
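For context on what the derived base32 secret feeds into: a standard RFC 6238 verifier turns it into a rolling six-digit code. The helper below is a generic standard-library sketch, not SeedPass code, and assumes the usual 30-second period and SHA-1 digest:

```python
import base64
import hashlib
import hmac
import struct
import time


def totp_code(secret_b32: str, period: int = 30, digits: int = 6) -> str:
    """Compute the current RFC 6238 code for a base32-encoded secret."""
    key = base64.b32decode(secret_b32)
    counter = struct.pack(">Q", int(time.time()) // period)
    digest = hmac.new(key, counter, hashlib.sha1).digest()
    offset = digest[-1] & 0x0F
    value = struct.unpack(">I", digest[offset:offset + 4])[0] & 0x7FFFFFFF
    return str(value % 10**digits).zfill(digits)
```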
@@ -267,18 +274,16 @@ def calibrate_argon2_time_cost(
     """
 
     password = "benchmark"
-    fingerprint = b"argon2-calibration"
+    salt = base64.b64encode(b"argon2-calibration").decode()
     time_cost = 1
     elapsed_ms = 0.0
     while time_cost <= max_time_cost:
         start = time.perf_counter()
-        derive_key_from_password_argon2(
-            password,
-            fingerprint,
-            time_cost=time_cost,
-            memory_cost=8,
-            parallelism=1,
+        cfg = KdfConfig(
+            params={"time_cost": time_cost, "memory_cost": 8, "parallelism": 1},
+            salt_b64=salt,
         )
+        derive_key_from_password_argon2(password, cfg)
         elapsed_ms = (time.perf_counter() - start) * 1000
         if elapsed_ms >= target_ms:
             break
28  src/utils/key_hierarchy.py  Normal file
@@ -0,0 +1,28 @@
+"""Key hierarchy helper functions."""
+
+from cryptography.hazmat.primitives.kdf.hkdf import HKDF
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.backends import default_backend
+
+
+def kd(root: bytes, info: bytes, length: int = 32) -> bytes:
+    """Derive a sub-key from ``root`` using HKDF-SHA256.
+
+    Parameters
+    ----------
+    root:
+        Root key material.
+    info:
+        Domain separation string.
+    length:
+        Length of the derived key in bytes. Defaults to 32.
+    """
+
+    hkdf = HKDF(
+        algorithm=hashes.SHA256(),
+        length=length,
+        salt=None,
+        info=info,
+        backend=default_backend(),
+    )
+    return hkdf.derive(root)
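The new helper above is the building block for the v1 key hierarchy; chaining two `kd()` calls with fixed labels is exactly what the reworked `derive_index_key_seed_only()` does. A small self-contained sketch (the seed bytes below are a stand-in for `Bip39SeedGenerator(seed).Generate()`):

```python
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF


def kd(root: bytes, info: bytes, length: int = 32) -> bytes:
    return HKDF(
        algorithm=hashes.SHA256(),
        length=length,
        salt=None,
        info=info,
        backend=default_backend(),
    ).derive(root)


seed_bytes = b"\x01" * 64  # stand-in for real BIP-39 seed bytes
master = kd(seed_bytes, b"seedpass:v1:master")
storage_key = kd(master, b"seedpass:v1:storage")

# The chain is fully deterministic: re-deriving from the same seed yields the same key.
assert storage_key == kd(kd(seed_bytes, b"seedpass:v1:master"), b"seedpass:v1:storage")
```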
@@ -33,6 +33,12 @@ logger = logging.getLogger(__name__)
 DEFAULT_MAX_ATTEMPTS = 5
 
 
+def _env_password() -> str | None:
+    """Return a password supplied via environment for non-interactive use."""
+
+    return os.getenv("SEEDPASS_TEST_PASSWORD") or os.getenv("SEEDPASS_PASSWORD")
+
+
 def _get_max_attempts(override: int | None = None) -> int:
     """Return the configured maximum number of prompt attempts."""
 
@@ -80,6 +86,13 @@ def prompt_new_password(max_retries: int | None = None) -> str:
     Raises:
         PasswordPromptError: If the user fails to provide a valid password after multiple attempts.
     """
+    env_pw = _env_password()
+    if env_pw:
+        normalized = unicodedata.normalize("NFKD", env_pw)
+        if len(normalized) < MIN_PASSWORD_LENGTH:
+            raise PasswordPromptError("Environment password too short")
+        return normalized
+
     max_retries = _get_max_attempts(max_retries)
     attempts = 0
 
@@ -164,6 +177,10 @@ def prompt_existing_password(
         PasswordPromptError: If the user interrupts the operation or exceeds
             ``max_retries`` attempts.
     """
+    env_pw = _env_password()
+    if env_pw:
+        return unicodedata.normalize("NFKD", env_pw)
+
     max_retries = _get_max_attempts(max_retries)
     attempts = 0
     while max_retries == 0 or attempts < max_retries:
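The additions above let tests and scripts skip the interactive prompt entirely. A quick sketch of how that path behaves (the password value is arbitrary):

```python
import os
import unicodedata

os.environ["SEEDPASS_TEST_PASSWORD"] = "correct horse battery staple"


def _env_password() -> str | None:
    # Mirrors the helper added above: the test variable wins, then the general one.
    return os.getenv("SEEDPASS_TEST_PASSWORD") or os.getenv("SEEDPASS_PASSWORD")


pw = _env_password()
assert pw is not None
assert unicodedata.normalize("NFKD", pw) == pw  # prompts return the NFKD-normalized value
```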
@@ -102,9 +102,11 @@ def _masked_input_posix(prompt: str) -> str:
 
 def masked_input(prompt: str) -> str:
     """Return input from the user while masking typed characters."""
-    if sys.platform == "win32":
-        return _masked_input_windows(prompt)
-    return _masked_input_posix(prompt)
+    func = _masked_input_windows if sys.platform == "win32" else _masked_input_posix
+    try:
+        return func(prompt)
+    except Exception:  # pragma: no cover - fallback when TTY operations fail
+        return input(prompt)
 
 
 def prompt_seed_words(count: int = 12, *, max_attempts: int | None = None) -> str:
@@ -9,10 +9,10 @@ class SlowBIP85:
     def __init__(self):
         self.calls = 0
 
-    def derive_entropy(self, index: int, bytes_len: int, app_no: int = 39) -> bytes:
+    def derive_entropy(self, index: int, entropy_bytes: int, app_no: int = 39) -> bytes:
         self.calls += 1
         time.sleep(0.01)
-        return b"\x00" * bytes_len
+        return b"\x00" * entropy_bytes
 
 
 def _setup_manager(bip85: SlowBIP85) -> PasswordManager:
@@ -21,10 +21,12 @@ def _setup_manager(bip85: SlowBIP85) -> PasswordManager:
     pm.bip85 = bip85
     orig = bip85.derive_entropy
 
-    def cached(index: int, bytes_len: int, app_no: int = 39) -> bytes:
+    def cached(index: int, entropy_bytes: int, app_no: int = 39) -> bytes:
         key = (app_no, index)
         if key not in pm._bip85_cache:
-            pm._bip85_cache[key] = orig(index=index, bytes_len=bytes_len, app_no=app_no)
+            pm._bip85_cache[key] = orig(
+                index=index, entropy_bytes=entropy_bytes, app_no=app_no
+            )
         return pm._bip85_cache[key]
 
     bip85.derive_entropy = cached
@@ -44,7 +46,7 @@ def test_bip85_cache_benchmark():
     for _ in range(3):
         pm.get_bip85_entropy(32, 1)
     cached_time = time.perf_counter() - start
-
-    assert cached_time < uncached_time
+    # Ensure caching avoids redundant derive calls without relying on
+    # potentially flaky timing comparisons across platforms.
    assert slow_uncached.calls == 3
    assert slow_cached.calls == 1