mirror of
https://github.com/PR0M3TH3AN/SeedPass.git
synced 2025-09-08 07:18:47 +00:00
Compare commits
252 Commits
main
...
15df3f10a6
Author | SHA1 | Date | |
---|---|---|---|
![]() |
15df3f10a6 | ||
![]() |
b451097c65 | ||
![]() |
9cacd1b13d | ||
![]() |
b97d60778b | ||
![]() |
bbb26ca55a | ||
![]() |
d6e03d5e7a | ||
![]() |
26632c0e70 | ||
![]() |
06ca51993a | ||
![]() |
1b6b0ab5c5 | ||
![]() |
87999b1888 | ||
![]() |
6928b4ddbf | ||
![]() |
73183d53a5 | ||
![]() |
c9ad16f150 | ||
![]() |
bd86bdbb3a | ||
![]() |
8d5374ef5b | ||
![]() |
468608a369 | ||
![]() |
56e652089a | ||
![]() |
c353c04472 | ||
![]() |
2559920a14 | ||
![]() |
57935bdfc1 | ||
![]() |
55fdee522c | ||
![]() |
af4eb72385 | ||
![]() |
90c304ff6e | ||
![]() |
7b1ef2abe2 | ||
![]() |
5194adf145 | ||
![]() |
8f74ac27f4 | ||
![]() |
1232630dba | ||
![]() |
62983df69c | ||
![]() |
b4238791aa | ||
![]() |
d1fccbc4f2 | ||
![]() |
50532597b8 | ||
![]() |
bb733bb194 | ||
![]() |
785acf938c | ||
![]() |
4973095a5c | ||
![]() |
69f1619816 | ||
![]() |
e1b821bc55 | ||
![]() |
a21efa91db | ||
![]() |
5109f96ce7 | ||
![]() |
19577163cf | ||
![]() |
b0e4ab9bc6 | ||
![]() |
3ff3e4e1d6 | ||
![]() |
08c4453326 | ||
![]() |
fddc169433 | ||
![]() |
28f552313f | ||
![]() |
294eef9725 | ||
![]() |
5d9281156b | ||
![]() |
c4297731b9 | ||
![]() |
f4df398738 | ||
![]() |
b0a2f17cc8 | ||
![]() |
b9525db9ae | ||
![]() |
199d02ab72 | ||
![]() |
c1a018e484 | ||
![]() |
4a537b7063 | ||
![]() |
d7ff7b2354 | ||
![]() |
f57bfcd7fa | ||
![]() |
d390bf8620 | ||
![]() |
fdd2530635 | ||
![]() |
fd9d3fa51b | ||
![]() |
a5f719363e | ||
![]() |
4c4394b026 | ||
![]() |
75df28dd60 | ||
![]() |
ccdc442bb8 | ||
![]() |
d492f76116 | ||
![]() |
064292df01 | ||
![]() |
1d91044e59 | ||
![]() |
c95a69e562 | ||
![]() |
593b173e95 | ||
![]() |
3ef0446e26 | ||
![]() |
b33dc5148d | ||
![]() |
344c2c82e7 | ||
![]() |
d98a158c83 | ||
![]() |
8737905e93 | ||
![]() |
d2c00eb0d6 | ||
![]() |
171f92167e | ||
![]() |
bbfe0c50a9 | ||
![]() |
61e3c2accc | ||
![]() |
00edb44442 | ||
![]() |
ba3c57ceb8 | ||
![]() |
28382cc649 | ||
![]() |
38a392a7c9 | ||
![]() |
8c9e325c76 | ||
![]() |
d77fb142a0 | ||
![]() |
bfc181c32b | ||
![]() |
26a4a74131 | ||
![]() |
5fe1b651a8 | ||
![]() |
051454ff2e | ||
![]() |
87f1e35487 | ||
![]() |
45304b41c2 | ||
![]() |
b15c0c17b7 | ||
![]() |
173a697b88 | ||
![]() |
5cdb4ecac5 | ||
![]() |
d2b8b6cb65 | ||
![]() |
7521014a61 | ||
![]() |
0e0ea183c8 | ||
![]() |
48e0632771 | ||
![]() |
d92385eff9 | ||
![]() |
92142a3e1b | ||
![]() |
ed7763195e | ||
![]() |
8079cd05b9 | ||
![]() |
979ba6f678 | ||
![]() |
af53e7f12c | ||
![]() |
54314cc5b3 | ||
![]() |
fd419fb943 | ||
![]() |
f571ded60c | ||
![]() |
b3b703985d | ||
![]() |
363b54b656 | ||
![]() |
1a35bb42bd | ||
![]() |
8cc2e75741 | ||
![]() |
edcf2787ee | ||
![]() |
072db52650 | ||
![]() |
a864da5751 | ||
![]() |
37a1d4b4cf | ||
![]() |
f0a7fb7da1 | ||
![]() |
b49e37b6e1 | ||
![]() |
94d0b80dce | ||
![]() |
4f11db5aa4 | ||
![]() |
099c24921f | ||
![]() |
7725701b50 | ||
![]() |
b795d1236a | ||
![]() |
6888fa2431 | ||
![]() |
1870614d8a | ||
![]() |
34f19e1b2b | ||
![]() |
41848fbcc3 | ||
![]() |
2aae6db22d | ||
![]() |
f36c12122e | ||
![]() |
68eaa34d76 | ||
![]() |
c2d80aa438 | ||
![]() |
87cf2d837b | ||
![]() |
ade2d99572 | ||
![]() |
91bea60928 | ||
![]() |
dc7673c7e0 | ||
![]() |
726a8f7aa4 | ||
![]() |
181f486afb | ||
![]() |
5e8375aad5 | ||
![]() |
20ee8a891b | ||
![]() |
fa4826fe2d | ||
![]() |
90b60a6682 | ||
![]() |
3744cf9f30 | ||
![]() |
2949cc22c9 | ||
![]() |
9c5e6a12a0 | ||
![]() |
89cbef1aa4 | ||
![]() |
d21ad78a02 | ||
![]() |
6260e81eaa | ||
![]() |
a78d587307 | ||
![]() |
19881dbeeb | ||
![]() |
224143eb76 | ||
![]() |
1f669746db | ||
![]() |
0d883b2736 | ||
![]() |
6a20728db4 | ||
![]() |
8e703e3282 | ||
![]() |
9cc7e4d0d7 | ||
![]() |
036e2e59be | ||
![]() |
3823603712 | ||
![]() |
f16a771a6c | ||
![]() |
1a194aec04 | ||
![]() |
f70f70e749 | ||
![]() |
4d7f28b400 | ||
![]() |
054ffd3383 | ||
![]() |
2b22fd7d5e | ||
![]() |
9cfd40ce7b | ||
![]() |
fdfdbc883b | ||
![]() |
264caff711 | ||
![]() |
b03530afba | ||
![]() |
8b8416c09f | ||
![]() |
9d71db0cf2 | ||
![]() |
68d8e03927 | ||
![]() |
0dda7ebe5b | ||
![]() |
9dbe22d332 | ||
![]() |
6d110679c5 | ||
![]() |
30da26f086 | ||
![]() |
d58c836fe6 | ||
![]() |
c64ca912b8 | ||
![]() |
f8f43dc2b5 | ||
![]() |
b40a7416ab | ||
![]() |
b5024d99de | ||
![]() |
aeee3b91d9 | ||
![]() |
292b443158 | ||
![]() |
7fc098e8f2 | ||
![]() |
42f9f0c4bb | ||
![]() |
bc8307f611 | ||
![]() |
bf129e5dca | ||
![]() |
2b959aa33f | ||
![]() |
cc077a9762 | ||
![]() |
d7a39c88d3 | ||
![]() |
1ca84ba946 | ||
![]() |
738667ca2d | ||
![]() |
6fa9f0839e | ||
![]() |
2f95944318 | ||
![]() |
6a31ec7e99 | ||
![]() |
f03a890776 | ||
![]() |
942cb1d89a | ||
![]() |
e655369eee | ||
![]() |
1301b79279 | ||
![]() |
e5ebfdcad4 | ||
![]() |
8e78a72257 | ||
![]() |
041e40bc1b | ||
![]() |
49675211e4 | ||
![]() |
30261094d2 | ||
![]() |
911fd6705d | ||
![]() |
7bb67030cb | ||
![]() |
8568e38d36 | ||
![]() |
675adfb84b | ||
![]() |
f0f7aee9e6 | ||
![]() |
aa688bc49a | ||
![]() |
77c4c33818 | ||
![]() |
d868d2204b | ||
![]() |
3a19ef9c2a | ||
![]() |
68341db0fe | ||
![]() |
3dc10ae448 | ||
![]() |
23a3ae3928 | ||
![]() |
f664a6c40f | ||
![]() |
44ce005cdc | ||
![]() |
01fe849f90 | ||
![]() |
d75cc760d3 | ||
![]() |
42aa945b00 | ||
![]() |
a9c5deb800 | ||
![]() |
b72452a734 | ||
![]() |
2c44f51fc4 | ||
![]() |
59c06041fd | ||
![]() |
b0db9806b3 | ||
![]() |
6f885bd65e | ||
![]() |
c3ed4c08ee | ||
![]() |
68f47052c3 | ||
![]() |
a16310b04b | ||
![]() |
1e544a7d41 | ||
![]() |
cb37783354 | ||
![]() |
5423c41b06 | ||
![]() |
2794b67d83 | ||
![]() |
aad41929bf | ||
![]() |
4f09ad5c26 | ||
![]() |
3cdf391742 | ||
![]() |
032caed3d0 | ||
![]() |
2294656f36 | ||
![]() |
9d9f8a8bae | ||
![]() |
9d80f7b607 | ||
![]() |
e5f1158101 | ||
![]() |
c7df96aac5 | ||
![]() |
5acd1d489d | ||
![]() |
f66e8b4776 | ||
![]() |
10a03384d0 | ||
![]() |
7631d32bc6 | ||
![]() |
6dabbaa31e | ||
![]() |
4228d82295 | ||
![]() |
ccca399b09 | ||
![]() |
36061493ac | ||
![]() |
f1bf65385c | ||
![]() |
906e3921a2 | ||
![]() |
7aeba78245 | ||
![]() |
087b3bd657 | ||
![]() |
186e39cc91 | ||
![]() |
8c9fe07609 | ||
![]() |
2f0eb44a44 |
10
.github/dependabot.yml
vendored
Normal file
10
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
6
.github/workflows/briefcase.yml
vendored
6
.github/workflows/briefcase.yml
vendored
@@ -16,8 +16,10 @@ jobs:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r src/requirements.txt
|
||||
pip install briefcase
|
||||
pip install pip-tools briefcase
|
||||
pip-compile --generate-hashes --output-file=requirements.lock src/requirements.txt
|
||||
git diff --exit-code requirements.lock
|
||||
pip install --require-hashes -r requirements.lock
|
||||
- name: Build with Briefcase
|
||||
run: briefcase build
|
||||
- name: Upload artifacts
|
||||
|
27
.github/workflows/dependency-audit.yml
vendored
Normal file
27
.github/workflows/dependency-audit.yml
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
name: Dependency Audit
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 0 * * 0'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
audit:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install pip-tools pip-audit
|
||||
pip-compile --generate-hashes --output-file=requirements.lock src/requirements.txt
|
||||
git diff --exit-code requirements.lock
|
||||
pip install --require-hashes -r requirements.lock
|
||||
- name: Run pip-audit
|
||||
run: pip-audit -r requirements.lock --ignore-vuln GHSA-wj6h-64fc-37mp
|
30
.github/workflows/python-ci.yml
vendored
30
.github/workflows/python-ci.yml
vendored
@@ -9,6 +9,20 @@ on:
|
||||
- cron: '0 3 * * *'
|
||||
|
||||
jobs:
|
||||
secret-scan:
|
||||
name: Secret Scan
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'pull_request' || github.event_name == 'schedule'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Run gitleaks
|
||||
uses: gitleaks/gitleaks-action@v2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITLEAKS_CONFIG: .gitleaks.toml
|
||||
|
||||
build:
|
||||
strategy:
|
||||
matrix:
|
||||
@@ -59,18 +73,18 @@ jobs:
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: ${{ runner.os }}-pip-${{ hashFiles('src/requirements.txt') }}
|
||||
key: ${{ runner.os }}-pip-${{ hashFiles('requirements.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pip-
|
||||
- name: Set up Python dependencies
|
||||
id: deps
|
||||
- name: Verify lockfile and install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r src/requirements.txt
|
||||
- name: Run pip-audit
|
||||
run: |
|
||||
pip install pip-audit
|
||||
pip-audit -r requirements.lock --ignore-vuln GHSA-wj6h-64fc-37mp
|
||||
pip install pip-tools
|
||||
pip-compile --generate-hashes --output-file=requirements.lock src/requirements.txt
|
||||
git diff --exit-code requirements.lock
|
||||
pip install --require-hashes -r requirements.lock
|
||||
- name: Run dependency scan
|
||||
run: scripts/dependency_scan.sh --ignore-vuln GHSA-wj6h-64fc-37mp
|
||||
- name: Determine stress args
|
||||
shell: bash
|
||||
run: |
|
||||
|
40
.github/workflows/tests.yml
vendored
Normal file
40
.github/workflows/tests.yml
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
name: Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["**"]
|
||||
pull_request:
|
||||
branches: ["**"]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
python-version: ["3.10", "3.11", "3.12"]
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Poetry
|
||||
run: pipx install poetry
|
||||
- name: Install dependencies
|
||||
run: poetry install
|
||||
- name: Check formatting
|
||||
run: poetry run black --check .
|
||||
- name: Run security audit
|
||||
run: |
|
||||
poetry run pip-audit || echo "::warning::pip-audit found vulnerabilities"
|
||||
shell: bash
|
||||
- name: Run tests with coverage
|
||||
run: |
|
||||
poetry run coverage run -m pytest
|
||||
poetry run coverage xml
|
||||
- name: Upload coverage report
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: coverage-${{ matrix.os }}-py${{ matrix.python-version }}
|
||||
path: coverage.xml
|
||||
|
8
.gitleaks.toml
Normal file
8
.gitleaks.toml
Normal file
@@ -0,0 +1,8 @@
|
||||
title = "SeedPass gitleaks config"
|
||||
|
||||
[allowlist]
|
||||
description = "Paths and patterns to ignore when scanning for secrets"
|
||||
# Add file paths that contain test data or other non-sensitive strings
|
||||
paths = []
|
||||
# Add regular expressions that match false positive secrets
|
||||
regexes = []
|
15
AGENTS.md
15
AGENTS.md
@@ -9,7 +9,7 @@ This project is written in **Python**. Follow these instructions when working wi
|
||||
```bash
|
||||
python3 -m venv venv
|
||||
source venv/bin/activate
|
||||
pip install -r src/requirements.txt
|
||||
pip install --require-hashes -r requirements.lock
|
||||
```
|
||||
|
||||
2. Run the test suite using **pytest**:
|
||||
@@ -39,6 +39,19 @@ This project is written in **Python**. Follow these instructions when working wi
|
||||
|
||||
Following these practices helps keep the code base consistent and secure.
|
||||
|
||||
## Deterministic Artifact Generation
|
||||
|
||||
- All generated artifacts (passwords, keys, TOTP secrets, etc.) must be fully deterministic across runs and platforms.
|
||||
- Randomness is only permitted for security primitives (e.g., encryption nonces, in-memory keys) and must never influence derived artifacts.
|
||||
|
||||
## Legacy Index Migration
|
||||
|
||||
- Always provide a migration path for index archives and import/export routines.
|
||||
- Support older SeedPass versions whose indexes lacked salts or password-based encryption by detecting legacy formats and upgrading them to the current schema.
|
||||
- Ensure migrations unlock older account indexes and allow Nostr synchronization.
|
||||
- Add regression tests covering these migrations whenever the index format or encryption changes.
|
||||
|
||||
|
||||
## Integrating New Entry Types
|
||||
|
||||
SeedPass supports multiple `kind` values in its JSON entry files. When adding a
|
||||
|
189
README.md
189
README.md
@@ -12,6 +12,10 @@
|
||||
|
||||
This software was not developed by an experienced security expert and should be used with caution. There may be bugs and missing features. Each vault chunk is limited to 50 KB and SeedPass periodically publishes a new snapshot to keep accumulated deltas small. The security of the program's memory management and logs has not been evaluated and may leak sensitive information. Loss or exposure of the parent seed places all derived passwords, accounts, and other artifacts at risk.
|
||||
|
||||
**🚨 Breaking Change**
|
||||
|
||||
Recent releases derive passwords and other artifacts using a fully deterministic algorithm that behaves consistently across Python versions. This improvement means artifacts generated with earlier versions of SeedPass will not match those produced now. Regenerate any previously derived data or retain the old version if you need to reproduce older passwords or keys.
|
||||
|
||||
---
|
||||
### Supported OS
|
||||
|
||||
@@ -28,6 +32,7 @@ SeedPass now uses the `portalocker` library for cross-platform file locking. No
|
||||
- [2. Create a Virtual Environment](#2-create-a-virtual-environment)
|
||||
- [3. Activate the Virtual Environment](#3-activate-the-virtual-environment)
|
||||
- [4. Install Dependencies](#4-install-dependencies)
|
||||
- [Optional GUI](#optional-gui)
|
||||
- [Usage](#usage)
|
||||
- [Running the Application](#running-the-application)
|
||||
- [Managing Multiple Seeds](#managing-multiple-seeds)
|
||||
@@ -36,6 +41,7 @@ SeedPass now uses the `portalocker` library for cross-platform file locking. No
|
||||
- [Building a standalone executable](#building-a-standalone-executable)
|
||||
- [Packaging with Briefcase](#packaging-with-briefcase)
|
||||
- [Security Considerations](#security-considerations)
|
||||
- [Dependency Updates](#dependency-updates)
|
||||
- [Contributing](#contributing)
|
||||
- [License](#license)
|
||||
- [Contact](#contact)
|
||||
@@ -44,7 +50,7 @@ SeedPass now uses the `portalocker` library for cross-platform file locking. No
|
||||
|
||||
- **Deterministic Password Generation:** Utilize BIP-85 for generating deterministic and secure passwords.
|
||||
- **Encrypted Storage:** All seeds, login passwords, and sensitive index data are encrypted locally.
|
||||
- **Nostr Integration:** Post and retrieve your encrypted password index to/from the Nostr network.
|
||||
- **Nostr Integration:** Post and retrieve your encrypted password index to/from the Nostr network. See [Nostr Setup](docs/nostr_setup.md) for relay configuration and event details.
|
||||
- **Chunked Snapshots:** Encrypted vaults are compressed and split into 50 KB chunks published as `kind 30071` events with a `kind 30070` manifest and `kind 30072` deltas. The manifest's `delta_since` field stores the UNIX timestamp of the latest delta event.
|
||||
- **Automatic Checksum Generation:** The script generates and verifies a SHA-256 checksum to detect tampering.
|
||||
- **Multiple Seed Profiles:** Manage separate seed profiles and switch between them seamlessly.
|
||||
@@ -77,32 +83,31 @@ before fading.
|
||||
|
||||
SeedPass follows a layered design. The **`seedpass.core`** package exposes the
|
||||
`PasswordManager` along with service classes (e.g. `VaultService` and
|
||||
`EntryService`) that implement the main API used across interfaces.
|
||||
The command line tool in **`seedpass.cli`** is a thin adapter built with Typer
|
||||
that delegates operations to this API layer.
|
||||
`EntryService`) that implement the main API used across interfaces. Both the
|
||||
command line tool in **`seedpass.cli`** and the FastAPI server in
|
||||
**`seedpass.api`** delegate operations to this core. The BeeWare desktop
|
||||
interface (`seedpass_gui.app`) and an optional browser extension reuse these
|
||||
services, with the extension communicating through the API layer.
|
||||
|
||||
The BeeWare desktop interface lives in **`seedpass_gui.app`** and can be
|
||||
started with either `seedpass-gui` or `python -m seedpass_gui`. It reuses the
|
||||
same service objects to unlock the vault, list entries and search through them.
|
||||
|
||||
An optional browser extension can communicate with the FastAPI server exposed by
|
||||
`seedpass.api` to manage entries from within the browser.
|
||||
Nostr synchronisation lives in the **`nostr`** modules. The core services call
|
||||
into these modules to publish or retrieve encrypted snapshots and deltas from
|
||||
configured relays.
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
core["seedpass.core"]
|
||||
cli["CLI"]
|
||||
api["FastAPI server"]
|
||||
gui["BeeWare GUI"]
|
||||
ext["Browser Extension"]
|
||||
core["seedpass.core"]
|
||||
nostr["Nostr client"]
|
||||
relays["Nostr relays"]
|
||||
|
||||
cli --> core
|
||||
gui --> core
|
||||
api --> core
|
||||
ext --> api
|
||||
core --> nostr
|
||||
nostr --> relays
|
||||
```
|
||||
|
||||
See `docs/ARCHITECTURE.md` for details.
|
||||
See `docs/ARCHITECTURE.md` and [Nostr Setup](docs/nostr_setup.md) for details.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@@ -114,7 +119,7 @@ See `docs/ARCHITECTURE.md` for details.
|
||||
### Quick Installer
|
||||
|
||||
Use the automated installer to download SeedPass and its dependencies in one step.
|
||||
The scripts also install the correct BeeWare backend for your platform automatically.
|
||||
The scripts can also install the BeeWare backend for your platform when requested (use `-IncludeGui` on Windows).
|
||||
If the GTK `gi` bindings are missing, the installer attempts to install the
|
||||
necessary system packages using `apt`, `yum`, `pacman`, or Homebrew.
|
||||
|
||||
@@ -132,11 +137,19 @@ Make sure the command ends right after `-b beta` with **no trailing parenthesis*
|
||||
```powershell
|
||||
Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; $scriptContent = (New-Object System.Net.WebClient).DownloadString('https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.ps1'); & ([scriptblock]::create($scriptContent))
|
||||
```
|
||||
*Install with the optional GUI:*
|
||||
```powershell
|
||||
Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; $scriptContent = (New-Object System.Net.WebClient).DownloadString('https://raw.githubusercontent.com/PR0M3TH3AN/SeedPass/main/scripts/install.ps1'); & ([scriptblock]::create($scriptContent)) -IncludeGui
|
||||
```
|
||||
Before running the script, install **Python 3.11** or **3.12** from [python.org](https://www.python.org/downloads/windows/) and tick **"Add Python to PATH"**. You should also install the [Visual Studio Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) with the **C++ build tools** workload so dependencies compile correctly.
|
||||
The Windows installer will attempt to install Git automatically if it is not already available. It also tries to install Python 3 using `winget`, `choco`, or `scoop` when Python is missing and recognizes the `py` launcher if `python` isn't on your PATH. If these tools are unavailable you'll see a link to download Python directly from <https://www.python.org/downloads/windows/>. When Python 3.13 or newer is detected without the Microsoft C++ build tools, the installer now attempts to download Python 3.12 automatically so you don't have to compile packages from source.
|
||||
|
||||
**Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.
|
||||
|
||||
#### Installer Dependency Checks
|
||||
|
||||
The installer verifies that core build tooling—C/C++ build tools, Rust, CMake, and the imaging/GTK libraries—are available before completing. Pass `--no-gui` to skip installing GUI packages. On Linux, ensure `xclip` or `wl-clipboard` is installed for clipboard support.
|
||||
|
||||
#### Windows Nostr Sync Troubleshooting
|
||||
|
||||
When backing up or restoring from Nostr on Windows, a few issues are common:
|
||||
@@ -197,7 +210,7 @@ Follow these steps to set up SeedPass on your local machine.
|
||||
|
||||
```bash
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install -r src/requirements.txt
|
||||
python -m pip install --require-hashes -r requirements.lock
|
||||
python -m pip install -e .
|
||||
```
|
||||
// 🔧 merged conflicting changes from codex/locate-command-usage-issue-in-seedpass vs beta
|
||||
@@ -205,10 +218,48 @@ After reinstalling, run `which seedpass` on Linux/macOS or `where seedpass` on W
|
||||
|
||||
#### Linux Clipboard Support
|
||||
|
||||
On Linux, `pyperclip` relies on external utilities like `xclip` or `xsel`. SeedPass will attempt to install **xclip** automatically if neither tool is available. If the automatic installation fails, you can install it manually:
|
||||
On Linux, `pyperclip` relies on external utilities like `xclip` or `xsel`. SeedPass no longer installs these tools automatically. To enable clipboard features such as secret mode, install **xclip** manually:
|
||||
|
||||
```bash
|
||||
sudo apt-get install xclip
|
||||
sudo apt install xclip
|
||||
```
|
||||
|
||||
After installing `xclip`, restart SeedPass to enable clipboard support.
|
||||
|
||||
### Optional GUI
|
||||
|
||||
SeedPass ships with a GTK-based desktop interface that is still in development
|
||||
and not currently functional. Install the packages for your platform before
|
||||
adding the Python GUI dependencies.
|
||||
|
||||
- **Debian/Ubuntu**
|
||||
```bash
|
||||
sudo apt install libgirepository1.0-dev libcairo2-dev libpango1.0-dev libwebkit2gtk-4.0-dev
|
||||
```
|
||||
- **Fedora**
|
||||
```bash
|
||||
sudo dnf install gobject-introspection-devel cairo-devel pango-devel webkit2gtk4.0-devel
|
||||
```
|
||||
- **Arch Linux**
|
||||
```bash
|
||||
sudo pacman -S gobject-introspection cairo pango webkit2gtk
|
||||
```
|
||||
- **macOS (Homebrew)**
|
||||
```bash
|
||||
brew install pygobject3 gtk+3 adwaita-icon-theme librsvg webkitgtk
|
||||
```
|
||||
|
||||
With the system requirements in place, install the Python GUI extras:
|
||||
|
||||
```bash
|
||||
pip install .[gui]
|
||||
```
|
||||
|
||||
CLI-only users can skip these steps and install just the core package for a
|
||||
lightweight setup:
|
||||
|
||||
```bash
|
||||
pip install .
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
@@ -227,10 +278,10 @@ You can then launch SeedPass and create a backup:
|
||||
seedpass
|
||||
|
||||
# Export your index
|
||||
seedpass export --file "~/seedpass_backup.json"
|
||||
seedpass vault export --file "~/seedpass_backup.json"
|
||||
|
||||
# Later you can restore it
|
||||
seedpass import --file "~/seedpass_backup.json"
|
||||
seedpass vault import --file "~/seedpass_backup.json"
|
||||
|
||||
# Quickly find or retrieve entries
|
||||
seedpass search "github"
|
||||
@@ -264,10 +315,17 @@ python -m seedpass_gui
|
||||
seedpass-gui
|
||||
```
|
||||
|
||||
Only `toga-core` and the headless `toga-dummy` backend are included by default.
|
||||
The quick installer automatically installs the correct BeeWare backend so the
|
||||
GUI works out of the box. If you set up SeedPass manually, install the backend
|
||||
for your platform:
|
||||
GUI dependencies are optional. Install them alongside SeedPass with:
|
||||
|
||||
```bash
|
||||
pip install "seedpass[gui]"
|
||||
|
||||
# or when working from a local checkout
|
||||
pip install -e .[gui]
|
||||
```
|
||||
|
||||
After installing the optional GUI extras, add the BeeWare backend for your
|
||||
platform:
|
||||
|
||||
```bash
|
||||
# Linux
|
||||
@@ -377,6 +435,16 @@ For a full list of commands see [docs/advanced_cli.md](docs/advanced_cli.md). Th
|
||||
```
|
||||
*(or `python src/main.py` when running directly from the repository)*
|
||||
|
||||
To restore a previously backed up index at launch, provide the backup path
|
||||
and fingerprint:
|
||||
|
||||
```bash
|
||||
seedpass --restore-backup /path/to/backup.json.enc --fingerprint <fp>
|
||||
```
|
||||
|
||||
Without the flag, the startup prompt offers a **Restore from backup** option
|
||||
before the vault is initialized.
|
||||
|
||||
2. **Follow the Prompts:**
|
||||
|
||||
- **Seed Profile Selection:** If you have existing seed profiles, you'll be prompted to select one or add a new one.
|
||||
@@ -558,21 +626,28 @@ The default configuration uses **50,000** PBKDF2 iterations. Increase this value
|
||||
### Recovery
|
||||
|
||||
If you previously backed up your vault to Nostr you can restore it during the
|
||||
initial setup:
|
||||
initial setup. You must provide both your 12‑word master seed and the master
|
||||
password that encrypted the vault; without the correct password the retrieved
|
||||
data cannot be decrypted.
|
||||
|
||||
Alternatively, a local backup file can be loaded at startup. Launch the
|
||||
application with `--restore-backup <file> --fingerprint <fp>` or choose the
|
||||
**Restore from backup** option presented before the vault initializes.
|
||||
|
||||
1. Start SeedPass and choose option **4** when prompted to set up a seed.
|
||||
2. Paste your BIP-85 seed phrase when asked.
|
||||
3. SeedPass initializes the profile and attempts to download the encrypted vault
|
||||
from the configured relays.
|
||||
4. A success message confirms the vault was restored. If no data is found a
|
||||
2. Paste your BIP‑85 seed phrase when asked.
|
||||
3. Enter the master password associated with that seed.
|
||||
4. SeedPass initializes the profile and attempts to download the encrypted
|
||||
vault from the configured relays.
|
||||
5. A success message confirms the vault was restored. If no data is found a
|
||||
failure message is shown and a new empty vault is created.
|
||||
|
||||
## Running Tests
|
||||
|
||||
SeedPass includes a small suite of unit tests located under `src/tests`. **Before running `pytest`, be sure to install the test requirements.** Activate your virtual environment and run `pip install -r src/requirements.txt` to ensure all testing dependencies are available. Then run the tests with **pytest**. Use `-vv` to see INFO-level log messages from each passing test:
|
||||
SeedPass includes a small suite of unit tests located under `src/tests`. **Before running `pytest`, be sure to install the test requirements.** Activate your virtual environment and run `pip install --require-hashes -r requirements.lock` to ensure all testing dependencies are available. Then run the tests with **pytest**. Use `-vv` to see INFO-level log messages from each passing test:
|
||||
|
||||
```bash
|
||||
pip install -r src/requirements.txt
|
||||
pip install --require-hashes -r requirements.lock
|
||||
pytest -vv
|
||||
```
|
||||
|
||||
@@ -636,7 +711,7 @@ Mutation testing is disabled in the GitHub workflow due to reliability issues an
|
||||
|
||||
1. Install all development dependencies:
|
||||
```bash
|
||||
pip install -r src/requirements.txt
|
||||
pip install --require-hashes -r requirements.lock
|
||||
```
|
||||
|
||||
2. When `src/runtime_requirements.txt` changes, rerun:
|
||||
@@ -706,7 +781,7 @@ You can also launch the GUI directly with `seedpass gui` or `seedpass-gui`.
|
||||
- **Backup Your Data:** Regularly back up your encrypted data and checksum files to prevent data loss.
|
||||
- **Backup the Settings PIN:** Your settings PIN is stored in the encrypted configuration file. Keep a copy of this file or remember the PIN, as losing it will require deleting the file and reconfiguring your relays.
|
||||
- **Protect Your Passwords:** Do not share your master password or seed phrases with anyone and ensure they are strong and unique.
|
||||
- **Revealing the Parent Seed:** The `vault reveal-parent-seed` command and `/api/v1/parent-seed` endpoint print your seed in plain text. Run them only in a secure environment.
|
||||
- **Backing Up the Parent Seed:** Use the CLI `vault reveal-parent-seed` command or the `/api/v1/vault/backup-parent-seed` endpoint with explicit confirmation to create an encrypted backup. The API does not return the seed directly.
|
||||
- **No PBKDF2 Salt Needed:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
|
||||
- **Checksum Verification:** Always verify the script's checksum to ensure its integrity and protect against unauthorized modifications.
|
||||
- **Potential Bugs and Limitations:** Be aware that the software may contain bugs and lacks certain features. Snapshot chunks are capped at 50 KB and the client rotates snapshots after enough delta events accumulate. The security of memory management and logs has not been thoroughly evaluated and may pose risks of leaking sensitive information.
|
||||
@@ -716,6 +791,50 @@ You can also launch the GUI directly with `seedpass gui` or `seedpass-gui`.
|
||||
- **KDF Iteration Caution:** Lowering `kdf_iterations` makes password cracking easier, while a high `backup_interval` leaves fewer recent backups.
|
||||
- **Offline Mode:** When enabled, SeedPass skips all Nostr operations so your vault stays local until syncing is turned back on.
|
||||
- **Quick Unlock:** Stores a hashed copy of your password in the encrypted config so you only need to enter it once per session. Avoid this on shared computers.
|
||||
- **Prompt Rate Limiting:** Seed and password prompts enforce a configurable attempt limit with exponential backoff to slow brute-force attacks. Adjust or disable the limit for testing via the `--max-prompt-attempts` CLI option or the `SEEDPASS_MAX_PROMPT_ATTEMPTS` environment variable.
|
||||
|
||||
### Secure Deployment
|
||||
|
||||
Always deploy SeedPass behind HTTPS. Place a TLS‑terminating reverse proxy such as Nginx in front of the FastAPI server or configure Uvicorn with certificate files. Example Nginx snippet:
|
||||
|
||||
```
|
||||
server {
|
||||
listen 443 ssl;
|
||||
ssl_certificate /etc/letsencrypt/live/example.com/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/example.com/privkey.pem;
|
||||
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For local testing, Uvicorn can run with TLS directly:
|
||||
|
||||
```
|
||||
uvicorn seedpass.api:app --ssl-certfile=cert.pem --ssl-keyfile=key.pem
|
||||
```
|
||||
|
||||
## Dependency Updates
|
||||
|
||||
Automated dependency updates are handled by [Dependabot](https://docs.github.com/en/code-security/dependabot).
|
||||
Every week, Dependabot checks Python packages and GitHub Actions used by this repository and opens pull requests when updates are available.
|
||||
|
||||
To review and merge these updates:
|
||||
|
||||
1. Review the changelog and release notes in the Dependabot pull request.
|
||||
2. Run the test suite locally:
|
||||
```bash
|
||||
python3 -m venv venv
|
||||
source venv/bin/activate
|
||||
pip install --require-hashes -r requirements.lock
|
||||
pytest
|
||||
```
|
||||
3. Merge the pull request once all checks pass.
|
||||
|
||||
A scheduled **Dependency Audit** workflow also runs [`pip-audit`](https://github.com/pypa/pip-audit) weekly to detect vulnerable packages. Address any reported issues promptly to keep dependencies secure.
|
||||
|
||||
## Contributing
|
||||
|
||||
|
@@ -7,19 +7,19 @@ This guide covers how to start the SeedPass API, authenticate requests, and inte
|
||||
|
||||
## Starting the API
|
||||
|
||||
Run `seedpass api start` from your terminal. The command prints a one‑time token used for authentication:
|
||||
Run `seedpass api start` from your terminal. The command prints a short‑lived JWT token used for authentication:
|
||||
|
||||
```bash
|
||||
$ seedpass api start
|
||||
API token: abcdef1234567890
|
||||
API token: eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...
|
||||
```
|
||||
|
||||
Keep this token secret. Every request must include it in the `Authorization` header using the `Bearer` scheme.
|
||||
Keep this token secret and avoid logging it. Tokens expire after a few minutes and every request must include one in the `Authorization` header using the `Bearer` scheme.
|
||||
|
||||
## Endpoints
|
||||
|
||||
- `GET /api/v1/entry?query=<text>` – Search entries matching a query.
|
||||
- `GET /api/v1/entry/{id}` – Retrieve a single entry by its index.
|
||||
- `GET /api/v1/entry/{id}` – Retrieve a single entry by its index. Requires an `X-SeedPass-Password` header.
|
||||
- `POST /api/v1/entry` – Create a new entry of any supported type.
|
||||
- `PUT /api/v1/entry/{id}` – Modify an existing entry.
|
||||
- `PUT /api/v1/config/{key}` – Update a configuration value.
|
||||
@@ -31,18 +31,17 @@ Keep this token secret. Every request must include it in the `Authorization` hea
|
||||
- `POST /api/v1/fingerprint` – Add a new seed fingerprint.
|
||||
- `DELETE /api/v1/fingerprint/{fp}` – Remove a fingerprint.
|
||||
- `POST /api/v1/fingerprint/select` – Switch the active fingerprint.
|
||||
- `GET /api/v1/totp/export` – Export all TOTP entries as JSON.
|
||||
- `GET /api/v1/totp` – Return current TOTP codes and remaining time.
|
||||
- `GET /api/v1/totp/export` – Export all TOTP entries as JSON. Requires an `X-SeedPass-Password` header.
|
||||
- `GET /api/v1/totp` – Return current TOTP codes and remaining time. Requires an `X-SeedPass-Password` header.
|
||||
- `GET /api/v1/stats` – Return statistics about the active seed profile.
|
||||
- `GET /api/v1/notifications` – Retrieve and clear queued notifications. Messages appear in the persistent notification box but remain queued until fetched.
|
||||
- `GET /api/v1/parent-seed` – Reveal the parent seed or save it with `?file=`.
|
||||
- `GET /api/v1/nostr/pubkey` – Fetch the Nostr public key for the active seed.
|
||||
- `POST /api/v1/checksum/verify` – Verify the checksum of the running script.
|
||||
- `POST /api/v1/checksum/update` – Update the stored script checksum.
|
||||
- `POST /api/v1/change-password` – Change the master password for the active profile.
|
||||
- `POST /api/v1/vault/import` – Import a vault backup from a file or path.
|
||||
- `POST /api/v1/vault/export` – Export the vault and download the encrypted file.
|
||||
- `POST /api/v1/vault/backup-parent-seed` – Save an encrypted backup of the parent seed.
|
||||
- `POST /api/v1/vault/export` – Export the vault and download the encrypted file. Requires an additional `X-SeedPass-Password` header.
|
||||
- `POST /api/v1/vault/backup-parent-seed` – Save an encrypted backup of the parent seed. Requires a `confirm` flag in the request body and an `X-SeedPass-Password` header.
|
||||
- `POST /api/v1/vault/lock` – Lock the vault and clear sensitive data from memory.
|
||||
- `GET /api/v1/relays` – List configured Nostr relays.
|
||||
- `POST /api/v1/relays` – Add a relay URL.
|
||||
@@ -50,7 +49,30 @@ Keep this token secret. Every request must include it in the `Authorization` hea
|
||||
- `POST /api/v1/relays/reset` – Reset the relay list to defaults.
|
||||
- `POST /api/v1/shutdown` – Stop the server gracefully.
|
||||
|
||||
**Security Warning:** Accessing `/api/v1/parent-seed` exposes your master seed in plain text. Use it only from a trusted environment.
|
||||
|
||||
## Secure Deployment
|
||||
|
||||
Always run the API behind HTTPS. Use a reverse proxy such as Nginx or Caddy to terminate TLS and forward requests to SeedPass. Example Nginx configuration:
|
||||
|
||||
```
|
||||
server {
|
||||
listen 443 ssl;
|
||||
ssl_certificate /etc/letsencrypt/live/example.com/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/example.com/privkey.pem;
|
||||
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For local testing, Uvicorn can serve TLS directly:
|
||||
|
||||
```
|
||||
uvicorn seedpass.api:app --ssl-certfile=cert.pem --ssl-keyfile=key.pem
|
||||
```
|
||||
|
||||
## Example Requests
|
||||
|
||||
@@ -58,7 +80,7 @@ Send requests with the token in the header:
|
||||
|
||||
```bash
|
||||
curl -H "Authorization: Bearer <token>" \
|
||||
"http://127.0.0.1:8000/api/v1/entry?query=email"
|
||||
"https://127.0.0.1:8000/api/v1/entry?query=email"
|
||||
```
|
||||
|
||||
### Creating an Entry
|
||||
@@ -149,8 +171,9 @@ curl -X POST http://127.0.0.1:8000/api/v1/fingerprint/select \
|
||||
Download an encrypted vault backup via `POST /api/v1/vault/export`:
|
||||
|
||||
```bash
|
||||
curl -X POST http://127.0.0.1:8000/api/v1/vault/export \
|
||||
curl -X POST https://127.0.0.1:8000/api/v1/vault/export \
|
||||
-H "Authorization: Bearer <token>" \
|
||||
-H "X-SeedPass-Password: <master-password>" \
|
||||
-o backup.json
|
||||
```
|
||||
|
||||
@@ -180,8 +203,9 @@ Trigger an encrypted seed backup with `/api/v1/vault/backup-parent-seed`:
|
||||
```bash
|
||||
curl -X POST http://127.0.0.1:8000/api/v1/vault/backup-parent-seed \
|
||||
-H "Authorization: Bearer <token>" \
|
||||
-H "X-SeedPass-Password: <master password>" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"path": "seed_backup.enc"}'
|
||||
-d '{"path": "seed_backup.enc", "confirm": true}'
|
||||
```
|
||||
|
||||
### Retrieving Vault Statistics
|
||||
|
@@ -3,6 +3,8 @@
|
||||
SeedPass stores its password index in an encrypted JSON file. Each index contains
|
||||
a `schema_version` field so the application knows how to upgrade older files.
|
||||
|
||||
> **Note:** Recent releases derive passwords and other artifacts using a new deterministic algorithm that works consistently across Python versions. Artifacts produced with older versions will not match outputs from this release and must be regenerated.
|
||||
|
||||
## How migrations work
|
||||
|
||||
When the vault loads the index, `Vault.load_index()` checks the version and
|
||||
|
@@ -14,10 +14,17 @@ python -m seedpass_gui
|
||||
seedpass-gui
|
||||
```
|
||||
|
||||
Only `toga-core` and the headless `toga-dummy` backend ship with the project.
|
||||
The installation scripts automatically install the correct BeeWare backend so
|
||||
the GUI works out of the box. If you set up SeedPass manually, install the
|
||||
backend for your platform:
|
||||
GUI dependencies are optional. Install them alongside SeedPass with:
|
||||
|
||||
```bash
|
||||
pip install "seedpass[gui]"
|
||||
|
||||
# or when working from a local checkout
|
||||
pip install -e .[gui]
|
||||
```
|
||||
|
||||
After installing the optional GUI extras, add the BeeWare backend for your
|
||||
platform:
|
||||
|
||||
```bash
|
||||
# Linux
|
||||
|
@@ -10,6 +10,10 @@
|
||||
|
||||
This software was not developed by an experienced security expert and should be used with caution. There may be bugs and missing features. Each vault chunk is limited to 50 KB and SeedPass periodically publishes a new snapshot to keep accumulated deltas small. The security of the program's memory management and logs has not been evaluated and may leak sensitive information. Loss or exposure of the parent seed places all derived passwords, accounts, and other artifacts at risk.
|
||||
|
||||
**🚨 Breaking Change**
|
||||
|
||||
Recent releases derive passwords and other artifacts using a fully deterministic algorithm that behaves consistently across Python versions. This improvement means artifacts generated with earlier versions of SeedPass will not match those produced now. Regenerate any previously derived data or retain the old version if you need to reproduce older passwords or keys.
|
||||
|
||||
---
|
||||
### Supported OS
|
||||
|
||||
@@ -46,6 +50,7 @@ maintainable while enabling a consistent experience on multiple platforms.
|
||||
- [Running the Application](#running-the-application)
|
||||
- [Managing Multiple Seeds](#managing-multiple-seeds)
|
||||
- [Additional Entry Types](#additional-entry-types)
|
||||
- [Recovery](#recovery)
|
||||
- [Security Considerations](#security-considerations)
|
||||
- [Contributing](#contributing)
|
||||
- [License](#license)
|
||||
@@ -115,6 +120,11 @@ isn't on your PATH. If these tools are unavailable you'll see a link to download
|
||||
the installer now attempts to download Python 3.12 automatically so you don't have to compile packages from source.
|
||||
|
||||
**Note:** If this fallback fails, install Python 3.12 manually or install the [Microsoft Visual C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/) and rerun the installer.
|
||||
|
||||
#### Installer Dependency Checks
|
||||
|
||||
The installer verifies that core build tooling—C/C++ build tools, Rust, CMake, and the imaging/GTK libraries—are available before completing. Pass `--no-gui` to skip installing GUI packages. On Linux, ensure `xclip` or `wl-clipboard` is installed for clipboard support.
|
||||
|
||||
### Uninstall
|
||||
|
||||
Run the matching uninstaller if you need to remove a previous installation or clean up an old `seedpass` command:
|
||||
@@ -185,20 +195,22 @@ When upgrading pip, use `python -m pip` inside the virtual environment so that p
|
||||
|
||||
```bash
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install -r src/requirements.txt
|
||||
python -m pip install --require-hashes -r requirements.lock
|
||||
python -m pip install -e .
|
||||
```
|
||||
|
||||
#### Linux Clipboard Support
|
||||
|
||||
On Linux, `pyperclip` relies on external utilities like `xclip` or `xsel`.
|
||||
SeedPass will attempt to install **xclip** automatically if neither tool is
|
||||
available. If the automatic installation fails, you can install it manually:
|
||||
SeedPass does not install these tools automatically. To use clipboard features
|
||||
such as secret mode, install **xclip** manually:
|
||||
|
||||
```bash
|
||||
sudo apt-get install xclip
|
||||
sudo apt install xclip
|
||||
```
|
||||
|
||||
After installing `xclip`, restart SeedPass to enable clipboard support.
|
||||
|
||||
## Quick Start
|
||||
|
||||
After installing dependencies, activate your virtual environment and install
|
||||
@@ -403,6 +415,22 @@ SeedPass allows you to manage multiple seed profiles (previously referred to as
|
||||
|
||||
**Note:** The term "seed profile" is used to represent different sets of seeds you can manage within SeedPass. This provides an intuitive way to handle multiple identities or sets of passwords.
|
||||
|
||||
|
||||
### Recovery
|
||||
|
||||
If you previously backed up your vault to Nostr you can restore it during the
|
||||
initial setup. You must provide both your 12 -word master seed and the master
|
||||
password that encrypted the vault; without the correct password the retrieved
|
||||
data cannot be decrypted.
|
||||
|
||||
1. Start SeedPass and choose option **4** when prompted to set up a seed.
|
||||
2. Paste your BIP‑85 seed phrase when asked.
|
||||
3. Enter the master password associated with that seed.
|
||||
4. SeedPass initializes the profile and attempts to download the encrypted
|
||||
vault from the configured relays.
|
||||
5. A success message confirms the vault was restored. If no data is found a
|
||||
failure message is shown and a new empty vault is created.
|
||||
|
||||
### Configuration File and Settings
|
||||
|
||||
SeedPass keeps per-profile settings in an encrypted file named `seedpass_config.json.enc` inside each profile directory under `~/.seedpass/`. This file stores your chosen Nostr relays and the optional settings PIN. New profiles start with the following default relays:
|
||||
@@ -450,11 +478,11 @@ Back in the Settings menu you can:
|
||||
|
||||
## Running Tests
|
||||
|
||||
SeedPass includes a small suite of unit tests located under `src/tests`. **Before running `pytest`, be sure to install the test requirements.** Activate your virtual environment and run `pip install -r src/requirements.txt` to ensure all testing dependencies are available. Then run the tests with **pytest**. Use `-vv` to see INFO-level log messages from each passing test:
|
||||
SeedPass includes a small suite of unit tests located under `src/tests`. **Before running `pytest`, be sure to install the test requirements.** Activate your virtual environment and run `pip install --require-hashes -r requirements.lock` to ensure all testing dependencies are available. Then run the tests with **pytest**. Use `-vv` to see INFO-level log messages from each passing test:
|
||||
|
||||
|
||||
```bash
|
||||
pip install -r src/requirements.txt
|
||||
pip install --require-hashes -r requirements.lock
|
||||
pytest -vv
|
||||
```
|
||||
|
||||
@@ -531,7 +559,7 @@ Mutation testing is disabled in the GitHub workflow due to reliability issues an
|
||||
- **Backup Your Data:** Regularly back up your encrypted data and checksum files to prevent data loss.
|
||||
- **Backup the Settings PIN:** Your settings PIN is stored in the encrypted configuration file. Keep a copy of this file or remember the PIN, as losing it will require deleting the file and reconfiguring your relays.
|
||||
- **Protect Your Passwords:** Do not share your master password or seed phrases with anyone and ensure they are strong and unique.
|
||||
- **Revealing the Parent Seed:** The `vault reveal-parent-seed` command and `/api/v1/parent-seed` endpoint print your seed in plain text. Run them only in a secure environment.
|
||||
- **Backing Up the Parent Seed:** Use the CLI `vault reveal-parent-seed` command or the `/api/v1/vault/backup-parent-seed` endpoint with explicit confirmation to create an encrypted backup. The API does not return the seed directly.
|
||||
- **No PBKDF2 Salt Needed:** SeedPass deliberately omits an explicit PBKDF2 salt. Every password is derived from a unique 512-bit BIP-85 child seed, which already provides stronger per-password uniqueness than a conventional 128-bit salt.
|
||||
- **Checksum Verification:** Always verify the script's checksum to ensure its integrity and protect against unauthorized modifications.
|
||||
- **Potential Bugs and Limitations:** Be aware that the software may contain bugs and lacks certain features. Snapshot chunks are capped at 50 KB and the client rotates snapshots after enough delta events accumulate. The security of memory management and logs has not been thoroughly evaluated and may pose risks of leaking sensitive information.
|
||||
|
33
docs/nostr_setup.md
Normal file
33
docs/nostr_setup.md
Normal file
@@ -0,0 +1,33 @@
|
||||
# Nostr Setup
|
||||
|
||||
This guide explains how SeedPass uses the Nostr protocol for encrypted vault backups and how to configure relays.
|
||||
|
||||
## Relay Configuration
|
||||
|
||||
SeedPass communicates with the Nostr network through a list of relays. You can manage these relays from the CLI:
|
||||
|
||||
```bash
|
||||
seedpass nostr list-relays # show configured relays
|
||||
seedpass nostr add-relay <url> # add a relay URL
|
||||
seedpass nostr remove-relay <n> # remove relay by index
|
||||
```
|
||||
|
||||
At least one relay is required for publishing and retrieving backups. Choose relays you trust to remain online and avoid those that charge high fees or aggressively rate‑limit connections.
|
||||
|
||||
## Manifest and Delta Events
|
||||
|
||||
Backups are published as parameterised replaceable events:
|
||||
|
||||
- **Kind 30070 – Manifest:** describes the snapshot and lists chunk IDs. The optional `delta_since` field stores the UNIX timestamp of the latest delta event.
|
||||
- **Kind 30071 – Snapshot Chunk:** each 50 KB fragment of the compressed, encrypted vault.
|
||||
- **Kind 30072 – Delta:** captures changes since the last snapshot.
|
||||
|
||||
When restoring, SeedPass downloads the most recent manifest and applies any newer delta events.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
- **No events found:** ensure the relays are reachable and that the correct fingerprint is selected.
|
||||
- **Connection failures:** some relays only support WebSocket over TLS; verify you are using `wss://` URLs where required.
|
||||
- **Stale data:** if deltas accumulate without a fresh snapshot, run `seedpass nostr sync` to publish an updated snapshot.
|
||||
|
||||
Increasing log verbosity with `--verbose` can also help diagnose relay or network issues.
|
38
docs/packaging.md
Normal file
38
docs/packaging.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# Packaging SeedPass
|
||||
|
||||
This guide describes how to build platform-native packages for SeedPass using [BeeWare Briefcase](https://briefcase.readthedocs.io/).
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* Python 3.12 with development headers (`python3-dev` on Debian/Ubuntu).
|
||||
* Briefcase installed in your virtual environment:
|
||||
|
||||
```bash
|
||||
pip install briefcase
|
||||
```
|
||||
|
||||
## Linux
|
||||
|
||||
The helper script in `packaging/build-linux.sh` performs `briefcase create`, `build`, and `package` for the current project.
|
||||
|
||||
```bash
|
||||
./packaging/build-linux.sh
|
||||
```
|
||||
|
||||
Briefcase outputs its build artifacts in `build/seedpass-gui/ubuntu/noble/`. These files can be bundled in container formats such as Flatpak or Snap. Example manifests are included:
|
||||
|
||||
* `packaging/flatpak/seedpass.yml` targets the `org.gnome.Platform` runtime and copies the Briefcase build into the Flatpak bundle.
|
||||
* `packaging/snapcraft.yaml` stages the Briefcase build and lists GTK libraries in `stage-packages` so the Snap includes its GUI dependencies.
|
||||
|
||||
## macOS and Windows
|
||||
|
||||
Scripts are provided to document the commands expected on each platform. They must be run on their respective operating systems:
|
||||
|
||||
* `packaging/build-macos.sh`
|
||||
* `packaging/build-windows.ps1`
|
||||
|
||||
Each script runs Briefcase's `create`, `build`, and `package` steps with `--no-input`.
|
||||
|
||||
## Reproducible Releases
|
||||
|
||||
The `packaging/` directory contains the scripts and manifests needed to regenerate desktop packages. Invoke the appropriate script on the target OS, then use the supplied Flatpak or Snap manifest to bundle additional dependencies for Linux.
|
17
docs/secret-scanning.md
Normal file
17
docs/secret-scanning.md
Normal file
@@ -0,0 +1,17 @@
|
||||
# Secret Scanning
|
||||
|
||||
SeedPass uses [Gitleaks](https://github.com/gitleaks/gitleaks) to scan the repository for accidentally committed secrets. The scan runs automatically for pull requests and on a nightly schedule. Any findings will cause the build to fail.
|
||||
|
||||
## Suppressing False Positives
|
||||
|
||||
If a file or string triggers the scanner but does not contain a real secret, add it to the allowlist in `.gitleaks.toml`.
|
||||
|
||||
```toml
|
||||
[allowlist]
|
||||
# Ignore specific files
|
||||
paths = ["path/to/file.txt"]
|
||||
# Ignore strings that match a regular expression
|
||||
regexes = ["""dummy_api_key"""]
|
||||
```
|
||||
|
||||
Commit the updated `.gitleaks.toml` to stop future alerts for the allowed items.
|
30
docs/security.md
Normal file
30
docs/security.md
Normal file
@@ -0,0 +1,30 @@
|
||||
# Security Testing and Calibration
|
||||
|
||||
This project includes fuzz tests and a calibration routine to tune Argon2 parameters for your hardware.
|
||||
|
||||
## Running Fuzz Tests
|
||||
|
||||
The fuzz tests exercise encryption and decryption with random data using [Hypothesis](https://hypothesis.readthedocs.io/).
|
||||
Activate the project's virtual environment and run:
|
||||
|
||||
```bash
|
||||
pytest src/tests/test_encryption_fuzz.py
|
||||
```
|
||||
|
||||
Running the entire test suite will also execute these fuzz tests.
|
||||
|
||||
## Calibrating Argon2 Time Cost
|
||||
|
||||
Argon2 performance varies by device. To calibrate the `time_cost` parameter, run the helper function:
|
||||
|
||||
```bash
|
||||
python - <<'PY'
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
from utils.key_derivation import calibrate_argon2_time_cost
|
||||
|
||||
# assuming ``cfg`` is a ConfigManager for your profile
|
||||
calibrate_argon2_time_cost(cfg)
|
||||
PY
|
||||
```
|
||||
|
||||
The selected `time_cost` is stored in the profile's configuration and used for subsequent key derivations.
|
@@ -202,6 +202,8 @@ flowchart TD
|
||||
<p>SeedPass allows you to manage multiple seed profiles (fingerprints). You can switch between different seeds to compartmentalize your passwords.</p>
|
||||
<h3 class="subsection-title">Nostr Relay Integration</h3>
|
||||
<p>SeedPass publishes your encrypted vault to Nostr in 50 KB chunks using parameterised replaceable events. A manifest describes each snapshot while deltas record updates. When too many deltas accumulate, a new snapshot is rotated in automatically.</p>
|
||||
<h3 class="subsection-title">Recovery from Nostr</h3>
|
||||
<p>Restoring a vault on a new device requires both your 12 word master seed and the master password that encrypted the vault. Without the correct password the downloaded archive cannot be decrypted.</p>
|
||||
<h3 class="subsection-title">Checksum Verification</h3>
|
||||
<p>Built-in checksum verification ensures your SeedPass installation hasn't been tampered with.</p>
|
||||
<h3 class="subsection-title">Interactive TUI</h3>
|
||||
|
5
packaging/build-linux.sh
Executable file
5
packaging/build-linux.sh
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
briefcase create linux --no-input
|
||||
briefcase build linux --no-input
|
||||
briefcase package linux --no-input
|
5
packaging/build-macos.sh
Executable file
5
packaging/build-macos.sh
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
briefcase create macos --no-input
|
||||
briefcase build macos --no-input
|
||||
briefcase package macos --no-input
|
3
packaging/build-windows.ps1
Normal file
3
packaging/build-windows.ps1
Normal file
@@ -0,0 +1,3 @@
|
||||
briefcase create windows --no-input
|
||||
briefcase build windows --no-input
|
||||
briefcase package windows --no-input
|
18
packaging/flatpak/seedpass.yml
Normal file
18
packaging/flatpak/seedpass.yml
Normal file
@@ -0,0 +1,18 @@
|
||||
app-id: io.seedpass.SeedPass
|
||||
runtime: org.gnome.Platform
|
||||
runtime-version: '46'
|
||||
sdk: org.gnome.Sdk
|
||||
command: seedpass-gui
|
||||
modules:
|
||||
- name: seedpass
|
||||
buildsystem: simple
|
||||
build-commands:
|
||||
- mkdir -p /app/bin
|
||||
- cp -r ../../build/seedpass-gui/ubuntu/noble/* /app/bin/
|
||||
sources:
|
||||
- type: dir
|
||||
path: ../../
|
||||
finish-args:
|
||||
- --share=network
|
||||
- --socket=fallback-x11
|
||||
- --socket=wayland
|
22
packaging/snapcraft.yaml
Normal file
22
packaging/snapcraft.yaml
Normal file
@@ -0,0 +1,22 @@
|
||||
name: seedpass
|
||||
base: core22
|
||||
version: '0.1.0'
|
||||
summary: Deterministic password manager
|
||||
description: |
|
||||
SeedPass deterministically generates passwords using BIP-39 seeds.
|
||||
grade: devel
|
||||
confinement: strict
|
||||
apps:
|
||||
seedpass-gui:
|
||||
command: bin/seedpass-gui
|
||||
plugs:
|
||||
- network
|
||||
- x11
|
||||
parts:
|
||||
seedpass:
|
||||
plugin: dump
|
||||
source: build/seedpass-gui/ubuntu/noble/app
|
||||
stage-packages:
|
||||
- libgtk-3-0
|
||||
- libglib2.0-0
|
||||
- libgdk-pixbuf2.0-0
|
3397
poetry.lock
generated
Normal file
3397
poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,12 +1,57 @@
|
||||
[project]
|
||||
[tool.poetry]
|
||||
name = "seedpass"
|
||||
version = "0.1.0"
|
||||
description = "Deterministic password manager with a BeeWare GUI"
|
||||
authors = []
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools>=61", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.10,<3.13"
|
||||
colorama = ">=0.4.6"
|
||||
termcolor = ">=1.1.0"
|
||||
cryptography = ">=40.0.2"
|
||||
bip-utils = ">=2.5.0"
|
||||
bech32 = "1.2.0"
|
||||
coincurve = ">=18.0.0"
|
||||
mnemonic = "*"
|
||||
aiohttp = ">=3.12.15"
|
||||
bcrypt = "*"
|
||||
portalocker = ">=2.8"
|
||||
nostr-sdk = ">=0.43"
|
||||
websocket-client = "1.7.0"
|
||||
websockets = ">=15.0.0"
|
||||
tomli = "*"
|
||||
pgpy = "0.6.0"
|
||||
pyotp = ">=2.8.0"
|
||||
pyperclip = "*"
|
||||
qrcode = ">=8.2"
|
||||
typer = ">=0.12.3"
|
||||
fastapi = ">=0.116.0"
|
||||
uvicorn = ">=0.35.0"
|
||||
httpx = ">=0.28.1"
|
||||
requests = ">=2.32"
|
||||
python-multipart = ">=0.0.20"
|
||||
orjson = "*"
|
||||
argon2-cffi = "*"
|
||||
PyJWT = ">=2.8.0"
|
||||
slowapi = "^0.1.9"
|
||||
toga-core = { version = ">=0.5.2", optional = true }
|
||||
pillow = { version = "*", optional = true }
|
||||
|
||||
[project.scripts]
|
||||
[tool.poetry.extras]
|
||||
gui = ["toga-core", "pillow"]
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
pytest = "^8.2"
|
||||
coverage = "^7.5"
|
||||
black = "^24.3"
|
||||
pip-audit = "^2.7"
|
||||
pytest-xdist = "^3.5"
|
||||
hypothesis = "^6.98"
|
||||
freezegun = "^1.5"
|
||||
toga-dummy = ">=0.5.2"
|
||||
Pillow = "^10.4"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
seedpass = "seedpass.cli:app"
|
||||
seedpass-gui = "seedpass_gui.app:main"
|
||||
|
||||
@@ -15,10 +60,15 @@ python_version = "3.11"
|
||||
strict = true
|
||||
mypy_path = "src"
|
||||
|
||||
[tool.briefcase]
|
||||
project_name = "SeedPass"
|
||||
bundle = "io.seedpass"
|
||||
version = "0.1.0"
|
||||
|
||||
[tool.briefcase.app.seedpass-gui]
|
||||
formal-name = "SeedPass"
|
||||
description = "Deterministic password manager with a BeeWare GUI"
|
||||
sources = ["src"]
|
||||
sources = ["src/seedpass_gui"]
|
||||
requires = [
|
||||
"toga-core>=0.5.2",
|
||||
"colorama>=0.4.6",
|
||||
@@ -28,7 +78,7 @@ requires = [
|
||||
"bech32==1.2.0",
|
||||
"coincurve>=18.0.0",
|
||||
"mnemonic",
|
||||
"aiohttp>=3.12.14",
|
||||
"aiohttp>=3.12.15",
|
||||
"bcrypt",
|
||||
"portalocker>=2.8",
|
||||
"nostr-sdk>=0.43",
|
||||
@@ -44,8 +94,14 @@ requires = [
|
||||
"uvicorn>=0.35.0",
|
||||
"httpx>=0.28.1",
|
||||
"requests>=2.32",
|
||||
"python-multipart",
|
||||
"python-multipart>=0.0.20",
|
||||
"orjson",
|
||||
"argon2-cffi",
|
||||
]
|
||||
icon = "logo/png/SeedPass-Logo-24.png"
|
||||
license = { file = "LICENSE" }
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
|
@@ -3,7 +3,7 @@ addopts = -n auto
|
||||
log_cli = true
|
||||
log_cli_level = WARNING
|
||||
log_level = WARNING
|
||||
testpaths = src/tests
|
||||
testpaths = src/tests tests
|
||||
markers =
|
||||
network: tests that require network connectivity
|
||||
stress: long running stress tests
|
||||
|
2048
requirements.lock
2048
requirements.lock
File diff suppressed because it is too large
Load Diff
9
scripts/dependency_scan.sh
Executable file
9
scripts/dependency_scan.sh
Executable file
@@ -0,0 +1,9 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Run pip-audit against the pinned requirements
|
||||
if ! command -v pip-audit >/dev/null 2>&1; then
|
||||
python -m pip install --quiet pip-audit
|
||||
fi
|
||||
|
||||
pip-audit -r requirements.lock "$@"
|
@@ -79,7 +79,7 @@ def initialize_profile(
|
||||
profile_dir = APP_DIR / fingerprint
|
||||
profile_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
seed_key = derive_key_from_password(DEFAULT_PASSWORD)
|
||||
seed_key = derive_key_from_password(DEFAULT_PASSWORD, fingerprint)
|
||||
seed_mgr = EncryptionManager(seed_key, profile_dir)
|
||||
seed_file = profile_dir / "parent_seed.enc"
|
||||
clear_path = profile_dir / "seed_phrase.txt"
|
||||
|
@@ -2,10 +2,12 @@
|
||||
# SeedPass Universal Installer for Windows
|
||||
#
|
||||
# Supports installing from a specific branch using the -Branch parameter.
|
||||
# Example: .\install.ps1 -Branch beta
|
||||
# Use -IncludeGui to install the optional BeeWare GUI backend.
|
||||
# Example: .\install.ps1 -Branch beta -IncludeGui
|
||||
|
||||
param(
|
||||
[string]$Branch = "main" # The git branch to install from
|
||||
[string]$Branch = "main", # The git branch to install from
|
||||
[switch]$IncludeGui # Install BeeWare GUI components
|
||||
)
|
||||
|
||||
# --- Configuration ---
|
||||
@@ -249,20 +251,30 @@ if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "Failed to upgrade pip"
|
||||
}
|
||||
|
||||
& "$VenvDir\Scripts\python.exe" -m pip install -r "src\requirements.txt"
|
||||
& "$VenvDir\Scripts\python.exe" -m pip install --require-hashes -r "requirements.lock"
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Warning "Failed to install Python dependencies. If errors mention C++, install Microsoft C++ Build Tools: https://visualstudio.microsoft.com/visual-cpp-build-tools/"
|
||||
Write-Error "Dependency installation failed."
|
||||
}
|
||||
|
||||
& "$VenvDir\Scripts\python.exe" -m pip install -e .
|
||||
if ($IncludeGui) {
|
||||
& "$VenvDir\Scripts\python.exe" -m pip install -e .[gui]
|
||||
} else {
|
||||
& "$VenvDir\Scripts\python.exe" -m pip install -e .
|
||||
}
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "Failed to install SeedPass package"
|
||||
}
|
||||
|
||||
Write-Info "Installing BeeWare GUI backend..."
|
||||
& "$VenvDir\Scripts\python.exe" -m pip install toga-winforms
|
||||
if ($LASTEXITCODE -ne 0) { Write-Warning "Failed to install GUI backend" }
|
||||
if ($IncludeGui) {
|
||||
Write-Info "Installing BeeWare GUI backend..."
|
||||
try {
|
||||
& "$VenvDir\Scripts\python.exe" -m pip install toga-winforms
|
||||
if ($LASTEXITCODE -ne 0) { throw "toga-winforms installation failed" }
|
||||
} catch {
|
||||
Write-Warning "Failed to install GUI backend. Install Microsoft C++ Build Tools from https://visualstudio.microsoft.com/visual-cpp-build-tools/ and rerun the installer."
|
||||
}
|
||||
}
|
||||
|
||||
# 5. Create launcher script
|
||||
Write-Info "Creating launcher script..."
|
||||
|
@@ -5,7 +5,9 @@
|
||||
# Supports installing from a specific branch using the -b or --branch flag.
|
||||
# Example: ./install.sh -b beta
|
||||
|
||||
set -e
|
||||
set -euo pipefail
|
||||
IFS=$'\n\t'
|
||||
trap 'echo "[ERROR] Line $LINENO failed"; exit 1' ERR
|
||||
|
||||
# --- Configuration ---
|
||||
REPO_URL="https://github.com/PR0M3TH3AN/SeedPass.git"
|
||||
@@ -15,41 +17,51 @@ VENV_DIR="$INSTALL_DIR/venv"
|
||||
LAUNCHER_DIR="$HOME/.local/bin"
|
||||
LAUNCHER_PATH="$LAUNCHER_DIR/seedpass"
|
||||
BRANCH="main" # Default branch
|
||||
INSTALL_GUI=true
|
||||
|
||||
# --- Helper Functions ---
|
||||
print_info() { echo -e "\033[1;34m[INFO]\033[0m $1"; }
|
||||
print_success() { echo -e "\033[1;32m[SUCCESS]\033[0m $1"; }
|
||||
print_warning() { echo -e "\033[1;33m[WARNING]\033[0m $1"; }
|
||||
print_error() { echo -e "\033[1;31m[ERROR]\033[0m $1" >&2; exit 1; }
|
||||
print_info() { echo -e "\033[1;34m[INFO]\033[0m" "$1"; }
|
||||
print_success() { echo -e "\033[1;32m[SUCCESS]\033[0m" "$1"; }
|
||||
print_warning() { echo -e "\033[1;33m[WARNING]\033[0m" "$1"; }
|
||||
print_error() { echo -e "\033[1;31m[ERROR]\033[0m" "$1" >&2; exit 1; }
|
||||
|
||||
# Install build dependencies for Gtk/GObject if available via the system package manager
|
||||
install_dependencies() {
|
||||
print_info "Installing system packages required for Gtk bindings..."
|
||||
if command -v apt-get &>/dev/null; then
|
||||
sudo apt-get update && sudo apt-get install -y \
|
||||
build-essential pkg-config libcairo2 libcairo2-dev \
|
||||
libgirepository1.0-dev gobject-introspection \
|
||||
gir1.2-gtk-3.0 python3-dev libffi-dev libssl-dev xclip
|
||||
sudo apt-get update && sudo apt-get install -y \\
|
||||
build-essential pkg-config libcairo2 libcairo2-dev \\
|
||||
libgirepository1.0-dev gobject-introspection \\
|
||||
gir1.2-gtk-3.0 libgtk-3-dev python3-dev libffi-dev libssl-dev \\
|
||||
cmake rustc cargo zlib1g-dev libjpeg-dev libpng-dev \\
|
||||
libfreetype6-dev xclip wl-clipboard
|
||||
elif command -v yum &>/dev/null; then
|
||||
sudo yum install -y @'Development Tools' cairo cairo-devel \
|
||||
gobject-introspection-devel gtk3-devel python3-devel \
|
||||
libffi-devel openssl-devel xclip
|
||||
sudo yum install -y @'Development Tools' cairo cairo-devel \\
|
||||
gobject-introspection-devel gtk3-devel python3-devel \\
|
||||
libffi-devel openssl-devel cmake rust cargo zlib-devel \\
|
||||
libjpeg-turbo-devel libpng-devel freetype-devel xclip \\
|
||||
wl-clipboard
|
||||
elif command -v dnf &>/dev/null; then
|
||||
sudo dnf groupinstall -y "Development Tools" && sudo dnf install -y \
|
||||
cairo cairo-devel gobject-introspection-devel gtk3-devel \
|
||||
python3-devel libffi-devel openssl-devel xclip
|
||||
sudo dnf groupinstall -y "Development Tools" && sudo dnf install -y \\
|
||||
cairo cairo-devel gobject-introspection-devel gtk3-devel \\
|
||||
python3-devel libffi-devel openssl-devel cmake rust cargo \\
|
||||
zlib-devel libjpeg-turbo-devel libpng-devel freetype-devel \\
|
||||
xclip wl-clipboard
|
||||
elif command -v pacman &>/dev/null; then
|
||||
sudo pacman -Syu --noconfirm base-devel pkgconf cairo \
|
||||
gobject-introspection gtk3 python xclip
|
||||
sudo pacman -Syu --noconfirm base-devel pkgconf cmake rustup \\
|
||||
gtk3 gobject-introspection cairo libjpeg-turbo zlib \\
|
||||
libpng freetype xclip wl-clipboard && rustup default stable
|
||||
elif command -v brew &>/dev/null; then
|
||||
brew install pkg-config cairo gobject-introspection gtk+3
|
||||
brew install pkg-config cairo gobject-introspection gtk+3 cmake rustup-init && \\
|
||||
rustup-init -y
|
||||
else
|
||||
print_warning "Unsupported package manager. Please install Gtk/GObject dependencies manually."
|
||||
fi
|
||||
}
|
||||
usage() {
|
||||
echo "Usage: $0 [-b | --branch <branch_name>] [-h | --help]"
|
||||
echo "Usage: $0 [-b | --branch <branch_name>] [--no-gui] [-h | --help]"
|
||||
echo " -b, --branch Specify the git branch to install (default: main)"
|
||||
echo " --no-gui Skip graphical interface dependencies (default: include GUI)"
|
||||
echo " -h, --help Display this help message"
|
||||
exit 0
|
||||
}
|
||||
@@ -70,6 +82,10 @@ main() {
|
||||
-h|--help)
|
||||
usage
|
||||
;;
|
||||
--no-gui)
|
||||
INSTALL_GUI=false
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
print_error "Unknown parameter passed: $1"; usage
|
||||
;;
|
||||
@@ -110,12 +126,14 @@ main() {
|
||||
fi
|
||||
|
||||
# 3. Install OS-specific dependencies
|
||||
print_info "Checking for Gtk development libraries..."
|
||||
if ! python3 -c "import gi" &>/dev/null; then
|
||||
print_warning "Gtk introspection bindings not found. Installing dependencies..."
|
||||
install_dependencies
|
||||
else
|
||||
print_info "Gtk bindings already available."
|
||||
if [ "$INSTALL_GUI" = true ]; then
|
||||
print_info "Checking for Gtk development libraries..."
|
||||
if command -v pkg-config &>/dev/null && pkg-config --exists girepository-2.0; then
|
||||
print_info "Gtk bindings already available."
|
||||
else
|
||||
print_warning "Gtk introspection bindings not found. Installing dependencies..."
|
||||
install_dependencies
|
||||
fi
|
||||
fi
|
||||
|
||||
# 4. Clone or update the repository
|
||||
@@ -139,18 +157,42 @@ main() {
|
||||
source "$VENV_DIR/bin/activate"
|
||||
|
||||
# 6. Install/Update Python dependencies
|
||||
print_info "Installing/updating Python dependencies from src/requirements.txt..."
|
||||
print_info "Installing/updating Python dependencies from requirements.lock..."
|
||||
pip install --upgrade pip
|
||||
pip install -r src/requirements.txt
|
||||
pip install -e .
|
||||
print_info "Installing platform-specific Toga backend..."
|
||||
if [ "$OS_NAME" = "Linux" ]; then
|
||||
print_info "Installing toga-gtk for Linux..."
|
||||
pip install toga-gtk
|
||||
elif [ "$OS_NAME" = "Darwin" ]; then
|
||||
print_info "Installing toga-cocoa for macOS..."
|
||||
pip install toga-cocoa
|
||||
pip install --require-hashes -r requirements.lock
|
||||
if [ "$INSTALL_GUI" = true ]; then
|
||||
GUI_READY=true
|
||||
if [ "$OS_NAME" = "Linux" ]; then
|
||||
if ! (command -v pkg-config &>/dev/null && pkg-config --exists girepository-2.0); then
|
||||
print_warning "GTK libraries (girepository-2.0) not found. Install them with: sudo apt install libgirepository1.0-dev"
|
||||
read -r -p "Continue with GUI installation anyway? (y/N) " CONTINUE_GUI
|
||||
if [[ ! "$CONTINUE_GUI" =~ ^[Yy]$ ]]; then
|
||||
GUI_READY=false
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
if [ "$GUI_READY" = true ]; then
|
||||
pip install -e .[gui]
|
||||
print_info "Installing platform-specific Toga backend..."
|
||||
if [ "$OS_NAME" = "Linux" ]; then
|
||||
print_info "Installing toga-gtk for Linux..."
|
||||
pip install toga-gtk
|
||||
elif [ "$OS_NAME" = "Darwin" ]; then
|
||||
print_info "Installing toga-cocoa for macOS..."
|
||||
pip install toga-cocoa
|
||||
fi
|
||||
else
|
||||
print_warning "Skipping GUI installation."
|
||||
pip install -e .
|
||||
fi
|
||||
else
|
||||
pip install -e .
|
||||
fi
|
||||
|
||||
if ! "$VENV_DIR/bin/python" -c "import seedpass.cli; print('ok')"; then
|
||||
print_error "SeedPass CLI import check failed."
|
||||
fi
|
||||
|
||||
deactivate
|
||||
|
||||
# 7. Create launcher script
|
||||
|
@@ -1,17 +1,15 @@
|
||||
# bip85/__init__.py
|
||||
|
||||
import logging
|
||||
import traceback
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
from .bip85 import BIP85
|
||||
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
logger.info("BIP85 module imported successfully.")
|
||||
except Exception as e:
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
logger.error(f"Failed to import BIP85 module: {e}", exc_info=True)
|
||||
except Exception as exc:
|
||||
logger.error("Failed to import BIP85 module: %s", exc, exc_info=True)
|
||||
raise ImportError(
|
||||
"BIP85 dependencies are missing. Install 'bip_utils', 'cryptography', and 'colorama'."
|
||||
) from exc
|
||||
|
||||
__all__ = ["BIP85"]
|
||||
|
@@ -18,7 +18,8 @@ import hashlib
|
||||
import hmac
|
||||
import logging
|
||||
import os
|
||||
import traceback
|
||||
from typing import Union
|
||||
|
||||
from colorama import Fore
|
||||
|
||||
from bip_utils import Bip32Slip10Secp256k1, Bip39MnemonicGenerator, Bip39Languages
|
||||
@@ -38,13 +39,19 @@ class Bip85Error(Exception):
|
||||
|
||||
|
||||
class BIP85:
|
||||
def __init__(self, seed_bytes: bytes | str):
|
||||
"""Initialize from BIP39 seed bytes or BIP32 xprv string."""
|
||||
def __init__(self, seed_or_xprv: Union[bytes, str]):
|
||||
"""Initialize from seed bytes or an ``xprv`` string.
|
||||
|
||||
Parameters:
|
||||
seed_or_xprv (Union[bytes, str]): Either raw BIP39 seed bytes
|
||||
or a BIP32 extended private key (``xprv``) string.
|
||||
"""
|
||||
|
||||
try:
|
||||
if isinstance(seed_bytes, (bytes, bytearray)):
|
||||
self.bip32_ctx = Bip32Slip10Secp256k1.FromSeed(seed_bytes)
|
||||
if isinstance(seed_or_xprv, (bytes, bytearray)):
|
||||
self.bip32_ctx = Bip32Slip10Secp256k1.FromSeed(seed_or_xprv)
|
||||
else:
|
||||
self.bip32_ctx = Bip32Slip10Secp256k1.FromExtendedKey(seed_bytes)
|
||||
self.bip32_ctx = Bip32Slip10Secp256k1.FromExtendedKey(seed_or_xprv)
|
||||
logging.debug("BIP32 context initialized successfully.")
|
||||
except Exception as e:
|
||||
logging.error(f"Error initializing BIP32 context: {e}", exc_info=True)
|
||||
@@ -52,26 +59,34 @@ class BIP85:
|
||||
raise Bip85Error(f"Error initializing BIP32 context: {e}")
|
||||
|
||||
def derive_entropy(
|
||||
self, index: int, bytes_len: int, app_no: int = 39, words_len: int | None = None
|
||||
self,
|
||||
index: int,
|
||||
entropy_bytes: int,
|
||||
app_no: int = 39,
|
||||
word_count: int | None = None,
|
||||
) -> bytes:
|
||||
"""
|
||||
Derives entropy using BIP-85 HMAC-SHA512 method.
|
||||
"""Derive entropy using the BIP-85 HMAC-SHA512 method.
|
||||
|
||||
Parameters:
|
||||
index (int): Index for the child entropy.
|
||||
bytes_len (int): Number of bytes to derive for the entropy.
|
||||
app_no (int): Application number (default 39 for BIP39)
|
||||
entropy_bytes (int): Number of bytes of entropy to derive.
|
||||
app_no (int): Application number (default 39 for BIP39).
|
||||
word_count (int | None): Number of words used in the derivation path
|
||||
for BIP39. If ``None`` and ``app_no`` is ``39``, ``word_count``
|
||||
defaults to ``entropy_bytes``. The final segment of the
|
||||
derivation path becomes ``m/83696968'/39'/0'/word_count'/index'``.
|
||||
|
||||
Returns:
|
||||
bytes: Derived entropy.
|
||||
bytes: Derived entropy of length ``entropy_bytes``.
|
||||
|
||||
Raises:
|
||||
SystemExit: If derivation fails or entropy length is invalid.
|
||||
SystemExit: If derivation fails or the derived entropy length is
|
||||
invalid.
|
||||
"""
|
||||
if app_no == 39:
|
||||
if words_len is None:
|
||||
words_len = bytes_len
|
||||
path = f"m/83696968'/{app_no}'/0'/{words_len}'/{index}'"
|
||||
if word_count is None:
|
||||
word_count = entropy_bytes
|
||||
path = f"m/83696968'/{app_no}'/0'/{word_count}'/{index}'"
|
||||
elif app_no == 32:
|
||||
path = f"m/83696968'/{app_no}'/{index}'"
|
||||
else:
|
||||
@@ -87,17 +102,17 @@ class BIP85:
|
||||
hmac_result = hmac.new(hmac_key, k, hashlib.sha512).digest()
|
||||
logging.debug(f"HMAC-SHA512 result: {hmac_result.hex()}")
|
||||
|
||||
entropy = hmac_result[:bytes_len]
|
||||
entropy = hmac_result[:entropy_bytes]
|
||||
|
||||
if len(entropy) != bytes_len:
|
||||
if len(entropy) != entropy_bytes:
|
||||
logging.error(
|
||||
f"Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
|
||||
f"Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
|
||||
)
|
||||
print(
|
||||
f"{Fore.RED}Error: Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
|
||||
f"{Fore.RED}Error: Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
|
||||
)
|
||||
raise Bip85Error(
|
||||
f"Derived entropy length is {len(entropy)} bytes; expected {bytes_len} bytes."
|
||||
f"Derived entropy length is {len(entropy)} bytes; expected {entropy_bytes} bytes."
|
||||
)
|
||||
|
||||
logging.debug(f"Derived entropy: {entropy.hex()}")
|
||||
@@ -108,14 +123,17 @@ class BIP85:
|
||||
raise Bip85Error(f"Error deriving entropy: {e}")
|
||||
|
||||
def derive_mnemonic(self, index: int, words_num: int) -> str:
|
||||
bytes_len = {12: 16, 18: 24, 24: 32}.get(words_num)
|
||||
if not bytes_len:
|
||||
entropy_bytes = {12: 16, 18: 24, 24: 32}.get(words_num)
|
||||
if not entropy_bytes:
|
||||
logging.error(f"Unsupported number of words: {words_num}")
|
||||
print(f"{Fore.RED}Error: Unsupported number of words: {words_num}")
|
||||
raise Bip85Error(f"Unsupported number of words: {words_num}")
|
||||
|
||||
entropy = self.derive_entropy(
|
||||
index=index, bytes_len=bytes_len, app_no=39, words_len=words_num
|
||||
index=index,
|
||||
entropy_bytes=entropy_bytes,
|
||||
app_no=39,
|
||||
word_count=words_num,
|
||||
)
|
||||
try:
|
||||
mnemonic = Bip39MnemonicGenerator(Bip39Languages.ENGLISH).FromEntropy(
|
||||
@@ -131,7 +149,7 @@ class BIP85:
|
||||
def derive_symmetric_key(self, index: int = 0, app_no: int = 2) -> bytes:
|
||||
"""Derive 32 bytes of entropy for symmetric key usage."""
|
||||
try:
|
||||
key = self.derive_entropy(index=index, bytes_len=32, app_no=app_no)
|
||||
key = self.derive_entropy(index=index, entropy_bytes=32, app_no=app_no)
|
||||
logging.debug(f"Derived symmetric key: {key.hex()}")
|
||||
return key
|
||||
except Exception as e:
|
||||
|
283
src/main.py
283
src/main.py
@@ -12,19 +12,23 @@ import logging
|
||||
import signal
|
||||
import time
|
||||
import argparse
|
||||
import asyncio
|
||||
import gzip
|
||||
import tomli
|
||||
from tomli import TOMLDecodeError
|
||||
from colorama import init as colorama_init
|
||||
from termcolor import colored
|
||||
from utils.color_scheme import color_text
|
||||
import traceback
|
||||
import importlib
|
||||
|
||||
from seedpass.core.manager import PasswordManager
|
||||
from seedpass.core.manager import PasswordManager, restore_backup_index
|
||||
from nostr.client import NostrClient
|
||||
from seedpass.core.entry_types import EntryType
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
from constants import INACTIVITY_TIMEOUT, initialize_app
|
||||
from utils.password_prompt import PasswordPromptError
|
||||
from utils.password_prompt import (
|
||||
PasswordPromptError,
|
||||
prompt_existing_password,
|
||||
prompt_new_password,
|
||||
)
|
||||
from utils import (
|
||||
timed_input,
|
||||
copy_to_clipboard,
|
||||
@@ -32,12 +36,33 @@ from utils import (
|
||||
pause,
|
||||
clear_header_with_notification,
|
||||
)
|
||||
from utils.clipboard import ClipboardUnavailableError
|
||||
from utils.atomic_write import atomic_write
|
||||
import queue
|
||||
from local_bip85.bip85 import Bip85Error
|
||||
|
||||
|
||||
colorama_init()
|
||||
|
||||
OPTIONAL_DEPENDENCIES = {
|
||||
"pyperclip": "clipboard support for secret mode",
|
||||
"qrcode": "QR code generation for TOTP setup",
|
||||
"toga": "desktop GUI features",
|
||||
}
|
||||
|
||||
|
||||
def _warn_missing_optional_dependencies() -> None:
|
||||
"""Log warnings for any optional packages that are not installed."""
|
||||
for module, feature in OPTIONAL_DEPENDENCIES.items():
|
||||
try:
|
||||
importlib.import_module(module)
|
||||
except ModuleNotFoundError:
|
||||
logging.warning(
|
||||
"Optional dependency '%s' is not installed; %s will be unavailable.",
|
||||
module,
|
||||
feature,
|
||||
)
|
||||
|
||||
|
||||
def load_global_config() -> dict:
|
||||
"""Load configuration from ~/.seedpass/config.toml if present."""
|
||||
@@ -47,7 +72,7 @@ def load_global_config() -> dict:
|
||||
try:
|
||||
with open(config_path, "rb") as f:
|
||||
return tomli.load(f)
|
||||
except Exception as exc:
|
||||
except (OSError, TOMLDecodeError) as exc:
|
||||
logging.warning(f"Failed to read {config_path}: {exc}")
|
||||
return {}
|
||||
|
||||
@@ -165,6 +190,13 @@ def handle_switch_fingerprint(password_manager: PasswordManager):
|
||||
return
|
||||
|
||||
selected_fingerprint = fingerprints[int(choice) - 1]
|
||||
if selected_fingerprint == password_manager.current_fingerprint:
|
||||
print(
|
||||
colored(
|
||||
f"Seed profile {selected_fingerprint} is already active.", "yellow"
|
||||
)
|
||||
)
|
||||
return
|
||||
if password_manager.select_fingerprint(selected_fingerprint):
|
||||
print(colored(f"Switched to seed profile {selected_fingerprint}.", "green"))
|
||||
else:
|
||||
@@ -188,11 +220,7 @@ def handle_add_new_fingerprint(password_manager: PasswordManager):
|
||||
|
||||
|
||||
def handle_remove_fingerprint(password_manager: PasswordManager):
|
||||
"""
|
||||
Handles removing an existing seed profile.
|
||||
|
||||
:param password_manager: An instance of PasswordManager.
|
||||
"""
|
||||
"""Handle removing an existing seed profile."""
|
||||
try:
|
||||
fingerprints = password_manager.fingerprint_manager.list_fingerprints()
|
||||
if not fingerprints:
|
||||
@@ -211,12 +239,24 @@ def handle_remove_fingerprint(password_manager: PasswordManager):
|
||||
|
||||
selected_fingerprint = fingerprints[int(choice) - 1]
|
||||
confirm = confirm_action(
|
||||
f"Are you sure you want to remove seed profile {selected_fingerprint}? This will delete all associated data. (Y/N): "
|
||||
f"Are you sure you want to remove seed profile {selected_fingerprint}? This will delete all associated data. (Y/N):"
|
||||
)
|
||||
if confirm:
|
||||
|
||||
def _cleanup_and_exit() -> None:
|
||||
password_manager.current_fingerprint = None
|
||||
password_manager.is_dirty = False
|
||||
getattr(password_manager, "cleanup", lambda: None)()
|
||||
print(colored("All seed profiles removed. Exiting.", "yellow"))
|
||||
sys.exit(0)
|
||||
|
||||
if password_manager.fingerprint_manager.remove_fingerprint(
|
||||
selected_fingerprint
|
||||
selected_fingerprint, _cleanup_and_exit
|
||||
):
|
||||
password_manager.current_fingerprint = (
|
||||
password_manager.fingerprint_manager.current_fingerprint
|
||||
)
|
||||
password_manager.is_dirty = False
|
||||
print(
|
||||
colored(
|
||||
f"Seed profile {selected_fingerprint} removed successfully.",
|
||||
@@ -305,7 +345,33 @@ def _display_live_stats(
|
||||
stats_mgr.reset()
|
||||
return
|
||||
|
||||
# Flush any pending input so an accidental newline doesn't exit immediately
|
||||
try: # pragma: no cover - depends on platform
|
||||
import termios
|
||||
|
||||
termios.tcflush(sys.stdin, termios.TCIFLUSH)
|
||||
except Exception:
|
||||
try: # pragma: no cover - Windows fallback
|
||||
import msvcrt
|
||||
|
||||
while msvcrt.kbhit():
|
||||
msvcrt.getwch()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
while True:
|
||||
# Break out immediately if the user has already pressed Enter
|
||||
try: # pragma: no cover - non-interactive environments
|
||||
import select
|
||||
|
||||
ready, _, _ = select.select([sys.stdin], [], [], 0)
|
||||
if ready:
|
||||
line = sys.stdin.readline().strip()
|
||||
if line == "" or line.lower() == "b":
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if callable(sync_fn):
|
||||
try:
|
||||
sync_fn()
|
||||
@@ -404,34 +470,36 @@ def handle_post_to_nostr(
|
||||
|
||||
|
||||
def handle_retrieve_from_nostr(password_manager: PasswordManager):
|
||||
"""
|
||||
Handles the action of retrieving the encrypted password index from Nostr.
|
||||
"""
|
||||
"""Retrieve the encrypted password index from Nostr."""
|
||||
try:
|
||||
password_manager.nostr_client.fingerprint = password_manager.current_fingerprint
|
||||
result = asyncio.run(password_manager.nostr_client.fetch_latest_snapshot())
|
||||
if result:
|
||||
manifest, chunks = result
|
||||
encrypted = gzip.decompress(b"".join(chunks))
|
||||
if manifest.delta_since:
|
||||
version = int(manifest.delta_since)
|
||||
deltas = asyncio.run(
|
||||
password_manager.nostr_client.fetch_deltas_since(version)
|
||||
)
|
||||
if deltas:
|
||||
encrypted = deltas[-1]
|
||||
password_manager.encryption_manager.decrypt_and_save_index_from_nostr(
|
||||
encrypted
|
||||
)
|
||||
print(colored("Encrypted index retrieved and saved successfully.", "green"))
|
||||
logging.info("Encrypted index retrieved and saved successfully from Nostr.")
|
||||
else:
|
||||
password_manager.sync_index_from_nostr()
|
||||
if password_manager.nostr_client.last_error:
|
||||
msg = (
|
||||
f"No Nostr events found for fingerprint"
|
||||
f" {password_manager.current_fingerprint}."
|
||||
if "Snapshot not found" in password_manager.nostr_client.last_error
|
||||
else password_manager.nostr_client.last_error
|
||||
)
|
||||
print(colored(msg, "red"))
|
||||
logging.error(msg)
|
||||
else:
|
||||
try:
|
||||
legacy_pub = (
|
||||
password_manager.nostr_client.key_manager.generate_legacy_nostr_keys().public_key_hex()
|
||||
)
|
||||
if password_manager.nostr_client.keys.public_key_hex() == legacy_pub:
|
||||
note = "Restored index from legacy Nostr backup."
|
||||
print(colored(note, "yellow"))
|
||||
logging.info(note)
|
||||
except Exception:
|
||||
pass
|
||||
print(
|
||||
colored(
|
||||
"Encrypted index retrieved and saved successfully.",
|
||||
"green",
|
||||
)
|
||||
)
|
||||
logging.info("Encrypted index retrieved and saved successfully from Nostr.")
|
||||
except Exception as e:
|
||||
logging.error(f"Failed to retrieve from Nostr: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to retrieve from Nostr: {e}", "red"))
|
||||
@@ -667,8 +735,7 @@ def handle_set_additional_backup_location(pm: PasswordManager) -> None:
|
||||
path = Path(value).expanduser()
|
||||
path.mkdir(parents=True, exist_ok=True)
|
||||
test_file = path / ".seedpass_write_test"
|
||||
with open(test_file, "w") as f:
|
||||
f.write("test")
|
||||
atomic_write(test_file, lambda f: f.write("test"))
|
||||
test_file.unlink()
|
||||
except Exception as e:
|
||||
print(colored(f"Path not writable: {e}", "red"))
|
||||
@@ -707,8 +774,18 @@ def handle_toggle_secret_mode(pm: PasswordManager) -> None:
|
||||
"""Toggle secret mode and adjust clipboard delay."""
|
||||
cfg = pm.config_manager
|
||||
if cfg is None:
|
||||
print(colored("Configuration manager unavailable.", "red"))
|
||||
return
|
||||
vault = getattr(pm, "vault", None)
|
||||
fingerprint_dir = getattr(pm, "fingerprint_dir", None)
|
||||
if vault is not None and fingerprint_dir is not None:
|
||||
try:
|
||||
cfg = pm.config_manager = ConfigManager(vault, fingerprint_dir)
|
||||
except Exception as exc:
|
||||
logging.error(f"Failed to initialize ConfigManager: {exc}")
|
||||
print(colored("Configuration manager unavailable.", "red"))
|
||||
return
|
||||
else:
|
||||
print(colored("Configuration manager unavailable.", "red"))
|
||||
return
|
||||
try:
|
||||
enabled = cfg.get_secret_mode_enabled()
|
||||
delay = cfg.get_clipboard_clear_delay()
|
||||
@@ -748,8 +825,18 @@ def handle_toggle_quick_unlock(pm: PasswordManager) -> None:
|
||||
"""Enable or disable Quick Unlock."""
|
||||
cfg = pm.config_manager
|
||||
if cfg is None:
|
||||
print(colored("Configuration manager unavailable.", "red"))
|
||||
return
|
||||
vault = getattr(pm, "vault", None)
|
||||
fingerprint_dir = getattr(pm, "fingerprint_dir", None)
|
||||
if vault is not None and fingerprint_dir is not None:
|
||||
try:
|
||||
cfg = pm.config_manager = ConfigManager(vault, fingerprint_dir)
|
||||
except Exception as exc:
|
||||
logging.error(f"Failed to initialize ConfigManager: {exc}")
|
||||
print(colored("Configuration manager unavailable.", "red"))
|
||||
return
|
||||
else:
|
||||
print(colored("Configuration manager unavailable.", "red"))
|
||||
return
|
||||
try:
|
||||
enabled = cfg.get_quick_unlock()
|
||||
except Exception as exc:
|
||||
@@ -775,8 +862,18 @@ def handle_toggle_offline_mode(pm: PasswordManager) -> None:
|
||||
"""Enable or disable offline mode."""
|
||||
cfg = pm.config_manager
|
||||
if cfg is None:
|
||||
print(colored("Configuration manager unavailable.", "red"))
|
||||
return
|
||||
vault = getattr(pm, "vault", None)
|
||||
fingerprint_dir = getattr(pm, "fingerprint_dir", None)
|
||||
if vault is not None and fingerprint_dir is not None:
|
||||
try:
|
||||
cfg = pm.config_manager = ConfigManager(vault, fingerprint_dir)
|
||||
except Exception as exc:
|
||||
logging.error(f"Failed to initialize ConfigManager: {exc}")
|
||||
print(colored("Configuration manager unavailable.", "red"))
|
||||
return
|
||||
else:
|
||||
print(colored("Configuration manager unavailable.", "red"))
|
||||
return
|
||||
try:
|
||||
enabled = cfg.get_offline_mode()
|
||||
except Exception as exc:
|
||||
@@ -930,7 +1027,16 @@ def handle_settings(password_manager: PasswordManager) -> None:
|
||||
elif choice == "2":
|
||||
handle_nostr_menu(password_manager)
|
||||
elif choice == "3":
|
||||
password_manager.change_password()
|
||||
try:
|
||||
old_pw = prompt_existing_password("Enter your current password: ")
|
||||
new_pw = prompt_new_password()
|
||||
password_manager.change_password(old_pw, new_pw)
|
||||
except ValueError:
|
||||
print(colored("Incorrect password.", "red"))
|
||||
except PasswordPromptError:
|
||||
pass
|
||||
except Exception as e:
|
||||
print(colored(f"Error: {e}", "red"))
|
||||
pause()
|
||||
elif choice == "4":
|
||||
password_manager.handle_verify_checksum()
|
||||
@@ -1008,6 +1114,7 @@ def display_menu(
|
||||
getattr(password_manager, "start_background_relay_check", lambda: None)()
|
||||
_display_live_stats(password_manager)
|
||||
while True:
|
||||
getattr(password_manager, "poll_background_errors", lambda: None)()
|
||||
fp, parent_fp, child_fp = getattr(
|
||||
password_manager,
|
||||
"header_fingerprint_args",
|
||||
@@ -1028,11 +1135,15 @@ def display_menu(
|
||||
getattr(password_manager, "start_background_relay_check", lambda: None)()
|
||||
continue
|
||||
# Periodically push updates to Nostr
|
||||
if (
|
||||
password_manager.is_dirty
|
||||
and time.time() - password_manager.last_update >= sync_interval
|
||||
):
|
||||
handle_post_to_nostr(password_manager)
|
||||
current_fp = getattr(password_manager, "current_fingerprint", None)
|
||||
if current_fp:
|
||||
if (
|
||||
password_manager.is_dirty
|
||||
and time.time() - password_manager.last_update >= sync_interval
|
||||
):
|
||||
handle_post_to_nostr(password_manager)
|
||||
password_manager.is_dirty = False
|
||||
else:
|
||||
password_manager.is_dirty = False
|
||||
|
||||
# Flush logging handlers
|
||||
@@ -1166,6 +1277,7 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
|
||||
Optional seed profile fingerprint to select automatically.
|
||||
"""
|
||||
configure_logging()
|
||||
_warn_missing_optional_dependencies()
|
||||
initialize_app()
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.info("Starting SeedPass Password Manager")
|
||||
@@ -1173,6 +1285,21 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
|
||||
load_global_config()
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--fingerprint")
|
||||
parser.add_argument(
|
||||
"--restore-backup",
|
||||
help="Restore index from backup file before starting",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-clipboard",
|
||||
action="store_true",
|
||||
help="Disable clipboard support and print secrets",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--max-prompt-attempts",
|
||||
type=int,
|
||||
default=None,
|
||||
help="Maximum number of password/seed prompt attempts (0 to disable)",
|
||||
)
|
||||
sub = parser.add_subparsers(dest="command")
|
||||
|
||||
exp = sub.add_parser("export")
|
||||
@@ -1192,6 +1319,44 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
|
||||
|
||||
args = parser.parse_args(argv)
|
||||
|
||||
if args.restore_backup:
|
||||
fp_target = args.fingerprint or fingerprint
|
||||
if fp_target is None:
|
||||
print(
|
||||
colored(
|
||||
"Error: --fingerprint is required when using --restore-backup.",
|
||||
"red",
|
||||
)
|
||||
)
|
||||
return 1
|
||||
try:
|
||||
restore_backup_index(Path(args.restore_backup), fp_target)
|
||||
logger.info("Restored backup from %s", args.restore_backup)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to restore backup: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to restore backup: {e}", "red"))
|
||||
return 1
|
||||
elif args.command is None:
|
||||
print("Startup Options:")
|
||||
print("1. Continue")
|
||||
print("2. Restore from backup")
|
||||
choice = input("Select an option: ").strip()
|
||||
if choice == "2":
|
||||
path = input("Enter backup file path: ").strip()
|
||||
fp_target = args.fingerprint or fingerprint
|
||||
if fp_target is None:
|
||||
fp_target = input("Enter fingerprint for restore: ").strip()
|
||||
try:
|
||||
restore_backup_index(Path(path), fp_target)
|
||||
logger.info("Restored backup from %s", path)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to restore backup: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to restore backup: {e}", "red"))
|
||||
return 1
|
||||
|
||||
if args.max_prompt_attempts is not None:
|
||||
os.environ["SEEDPASS_MAX_PROMPT_ATTEMPTS"] = str(args.max_prompt_attempts)
|
||||
|
||||
try:
|
||||
password_manager = PasswordManager(fingerprint=args.fingerprint or fingerprint)
|
||||
logger.info("PasswordManager initialized successfully.")
|
||||
@@ -1204,6 +1369,9 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
|
||||
print(colored(f"Error: Failed to initialize PasswordManager: {e}", "red"))
|
||||
return 1
|
||||
|
||||
if args.no_clipboard:
|
||||
password_manager.secret_mode_enabled = False
|
||||
|
||||
if args.command == "export":
|
||||
password_manager.handle_export_database(Path(args.file))
|
||||
return 0
|
||||
@@ -1247,15 +1415,22 @@ def main(argv: list[str] | None = None, *, fingerprint: str | None = None) -> in
|
||||
if entry.get("type") != EntryType.TOTP.value:
|
||||
print(colored("Entry is not a TOTP entry.", "red"))
|
||||
return 1
|
||||
code = password_manager.entry_manager.get_totp_code(
|
||||
idx, password_manager.parent_seed
|
||||
key = getattr(password_manager, "KEY_TOTP_DET", None) or getattr(
|
||||
password_manager, "parent_seed", None
|
||||
)
|
||||
code = password_manager.entry_manager.get_totp_code(idx, key)
|
||||
print(code)
|
||||
try:
|
||||
copy_to_clipboard(code, password_manager.clipboard_clear_delay)
|
||||
print(colored("Code copied to clipboard", "green"))
|
||||
except Exception as exc:
|
||||
logging.warning(f"Clipboard copy failed: {exc}")
|
||||
if copy_to_clipboard(code, password_manager.clipboard_clear_delay):
|
||||
print(colored("Code copied to clipboard", "green"))
|
||||
except ClipboardUnavailableError as exc:
|
||||
print(
|
||||
colored(
|
||||
f"Clipboard unavailable: {exc}\n"
|
||||
"Re-run with '--no-clipboard' to print codes instead.",
|
||||
"yellow",
|
||||
)
|
||||
)
|
||||
return 0
|
||||
|
||||
def signal_handler(sig, _frame):
|
||||
|
@@ -1,36 +1,39 @@
|
||||
# src/nostr/client.py
|
||||
|
||||
import asyncio
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
from typing import List, Optional, Tuple, TYPE_CHECKING
|
||||
import hashlib
|
||||
import asyncio
|
||||
import gzip
|
||||
import threading
|
||||
import websockets
|
||||
from datetime import timedelta
|
||||
from typing import List, Optional, TYPE_CHECKING
|
||||
|
||||
# Imports from the nostr-sdk library
|
||||
import websockets
|
||||
from nostr_sdk import (
|
||||
Client,
|
||||
Keys,
|
||||
NostrSigner,
|
||||
EventBuilder,
|
||||
Filter,
|
||||
Kind,
|
||||
KindStandard,
|
||||
NostrSigner,
|
||||
Tag,
|
||||
RelayUrl,
|
||||
PublicKey,
|
||||
)
|
||||
from datetime import timedelta
|
||||
from nostr_sdk import EventId, Timestamp
|
||||
from nostr_sdk import EventId, Keys, Timestamp
|
||||
|
||||
from .key_manager import KeyManager as SeedPassKeyManager
|
||||
from .backup_models import Manifest, ChunkMeta, KIND_MANIFEST, KIND_SNAPSHOT_CHUNK
|
||||
from seedpass.core.encryption import EncryptionManager
|
||||
from constants import MAX_RETRIES, RETRY_DELAY
|
||||
from utils.file_lock import exclusive_lock
|
||||
from seedpass.core.encryption import EncryptionManager
|
||||
|
||||
from .backup_models import (
|
||||
ChunkMeta,
|
||||
KIND_DELTA,
|
||||
KIND_MANIFEST,
|
||||
KIND_SNAPSHOT_CHUNK,
|
||||
Manifest,
|
||||
)
|
||||
from .connection import ConnectionHandler, DEFAULT_RELAYS
|
||||
from .key_manager import KeyManager as SeedPassKeyManager
|
||||
from .snapshot import MANIFEST_ID_PREFIX, SnapshotHandler, prepare_snapshot
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover - imported for type hints
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
@@ -42,56 +45,8 @@ ClientBuilder = Client
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.setLevel(logging.WARNING)
|
||||
|
||||
DEFAULT_RELAYS = [
|
||||
"wss://relay.snort.social",
|
||||
"wss://nostr.oxtr.dev",
|
||||
"wss://relay.primal.net",
|
||||
]
|
||||
|
||||
# Identifier prefix for replaceable manifest events
|
||||
MANIFEST_ID_PREFIX = "seedpass-manifest-"
|
||||
|
||||
|
||||
def prepare_snapshot(
|
||||
encrypted_bytes: bytes, limit: int
|
||||
) -> Tuple[Manifest, list[bytes]]:
|
||||
"""Compress and split the encrypted vault into chunks.
|
||||
|
||||
Each chunk is hashed with SHA-256 and described in the returned
|
||||
:class:`Manifest`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
encrypted_bytes : bytes
|
||||
The encrypted vault contents.
|
||||
limit : int
|
||||
Maximum chunk size in bytes.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Tuple[Manifest, list[bytes]]
|
||||
The manifest describing all chunks and the list of chunk bytes.
|
||||
"""
|
||||
|
||||
compressed = gzip.compress(encrypted_bytes)
|
||||
chunks = [compressed[i : i + limit] for i in range(0, len(compressed), limit)]
|
||||
|
||||
metas: list[ChunkMeta] = []
|
||||
for i, chunk in enumerate(chunks):
|
||||
metas.append(
|
||||
ChunkMeta(
|
||||
id=f"seedpass-chunk-{i:04d}",
|
||||
size=len(chunk),
|
||||
hash=hashlib.sha256(chunk).hexdigest(),
|
||||
event_id=None,
|
||||
)
|
||||
)
|
||||
|
||||
manifest = Manifest(ver=1, algo="gzip", chunks=metas)
|
||||
return manifest, chunks
|
||||
|
||||
|
||||
class NostrClient:
|
||||
class NostrClient(ConnectionHandler, SnapshotHandler):
|
||||
"""Interact with the Nostr network using nostr-sdk."""
|
||||
|
||||
def __init__(
|
||||
@@ -151,540 +106,10 @@ class NostrClient:
|
||||
|
||||
self._connected = False
|
||||
|
||||
def connect(self) -> None:
|
||||
"""Connect the client to all configured relays."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return
|
||||
if not self._connected:
|
||||
self.initialize_client_pool()
|
||||
|
||||
def initialize_client_pool(self) -> None:
|
||||
"""Add relays to the client and connect."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return
|
||||
asyncio.run(self._initialize_client_pool())
|
||||
|
||||
async def _connect_async(self) -> None:
|
||||
"""Ensure the client is connected within an async context."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return
|
||||
if not self._connected:
|
||||
await self._initialize_client_pool()
|
||||
|
||||
async def _initialize_client_pool(self) -> None:
|
||||
if self.offline_mode or not self.relays:
|
||||
return
|
||||
|
||||
formatted = []
|
||||
for relay in self.relays:
|
||||
if isinstance(relay, str):
|
||||
try:
|
||||
formatted.append(RelayUrl.parse(relay))
|
||||
except Exception:
|
||||
logger.error("Invalid relay URL: %s", relay)
|
||||
else:
|
||||
formatted.append(relay)
|
||||
|
||||
if hasattr(self.client, "add_relays"):
|
||||
await self.client.add_relays(formatted)
|
||||
else:
|
||||
for relay in formatted:
|
||||
await self.client.add_relay(relay)
|
||||
|
||||
await self.client.connect()
|
||||
self._connected = True
|
||||
logger.info("NostrClient connected to relays: %s", formatted)
|
||||
|
||||
async def _ping_relay(self, relay: str, timeout: float) -> bool:
|
||||
"""Attempt to retrieve the latest event from a single relay."""
|
||||
sub_id = "seedpass-health"
|
||||
pubkey = self.keys.public_key().to_hex()
|
||||
req = json.dumps(
|
||||
["REQ", sub_id, {"kinds": [1], "authors": [pubkey], "limit": 1}]
|
||||
)
|
||||
try:
|
||||
async with websockets.connect(
|
||||
relay, open_timeout=timeout, close_timeout=timeout
|
||||
) as ws:
|
||||
await ws.send(req)
|
||||
while True:
|
||||
msg = await asyncio.wait_for(ws.recv(), timeout=timeout)
|
||||
data = json.loads(msg)
|
||||
if data[0] in {"EVENT", "EOSE"}:
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
async def _check_relay_health(self, min_relays: int, timeout: float) -> int:
|
||||
tasks = [self._ping_relay(r, timeout) for r in self.relays]
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
healthy = sum(1 for r in results if r is True)
|
||||
if healthy < min_relays:
|
||||
logger.warning(
|
||||
"Only %s relays responded with data; consider adding more.", healthy
|
||||
)
|
||||
return healthy
|
||||
|
||||
def check_relay_health(self, min_relays: int = 2, timeout: float = 5.0) -> int:
|
||||
"""Ping relays and return the count of those providing data."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return 0
|
||||
return asyncio.run(self._check_relay_health(min_relays, timeout))
|
||||
|
||||
def publish_json_to_nostr(
|
||||
self,
|
||||
encrypted_json: bytes,
|
||||
to_pubkey: str | None = None,
|
||||
alt_summary: str | None = None,
|
||||
) -> str | None:
|
||||
"""Builds and publishes a Kind 1 text note or direct message.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
encrypted_json : bytes
|
||||
The encrypted index data to publish.
|
||||
to_pubkey : str | None, optional
|
||||
If provided, send as a direct message to this public key.
|
||||
alt_summary : str | None, optional
|
||||
If provided, include an ``alt`` tag so uploads can be
|
||||
associated with a specific event like a password change.
|
||||
"""
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
self.connect()
|
||||
self.last_error = None
|
||||
try:
|
||||
content = base64.b64encode(encrypted_json).decode("utf-8")
|
||||
|
||||
if to_pubkey:
|
||||
receiver = PublicKey.parse(to_pubkey)
|
||||
event_output = self.client.send_private_msg_to(
|
||||
self.relays, receiver, content
|
||||
)
|
||||
else:
|
||||
builder = EventBuilder.text_note(content)
|
||||
if alt_summary:
|
||||
builder = builder.tags([Tag.alt(alt_summary)])
|
||||
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
|
||||
event_output = self.publish_event(event)
|
||||
|
||||
event_id_hex = (
|
||||
event_output.id.to_hex()
|
||||
if hasattr(event_output, "id")
|
||||
else str(event_output)
|
||||
)
|
||||
logger.info(f"Successfully published event with ID: {event_id_hex}")
|
||||
return event_id_hex
|
||||
|
||||
except Exception as e:
|
||||
self.last_error = str(e)
|
||||
logger.error(f"Failed to publish JSON to Nostr: {e}")
|
||||
return None
|
||||
|
||||
def publish_event(self, event):
|
||||
"""Publish a prepared event to the configured relays."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
self.connect()
|
||||
return asyncio.run(self._publish_event(event))
|
||||
|
||||
async def _publish_event(self, event):
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
await self._connect_async()
|
||||
return await self.client.send_event(event)
|
||||
|
||||
def update_relays(self, new_relays: List[str]) -> None:
|
||||
"""Reconnect the client using a new set of relays."""
|
||||
self.close_client_pool()
|
||||
self.relays = new_relays
|
||||
signer = NostrSigner.keys(self.keys)
|
||||
self.client = Client(signer)
|
||||
self._connected = False
|
||||
# Immediately reconnect using the updated relay list
|
||||
self.initialize_client_pool()
|
||||
|
||||
def retrieve_json_from_nostr_sync(
|
||||
self, retries: int | None = None, delay: float | None = None
|
||||
) -> Optional[bytes]:
|
||||
"""Retrieve the latest Kind 1 event from the author with optional retries."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
|
||||
if retries is None or delay is None:
|
||||
if self.config_manager is None:
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
from seedpass.core.vault import Vault
|
||||
|
||||
cfg_mgr = ConfigManager(
|
||||
Vault(self.encryption_manager, self.fingerprint_dir),
|
||||
self.fingerprint_dir,
|
||||
)
|
||||
else:
|
||||
cfg_mgr = self.config_manager
|
||||
cfg = cfg_mgr.load_config(require_pin=False)
|
||||
retries = int(cfg.get("nostr_max_retries", MAX_RETRIES))
|
||||
delay = float(cfg.get("nostr_retry_delay", RETRY_DELAY))
|
||||
|
||||
self.connect()
|
||||
self.last_error = None
|
||||
for attempt in range(retries):
|
||||
try:
|
||||
result = asyncio.run(self._retrieve_json_from_nostr())
|
||||
if result is not None:
|
||||
return result
|
||||
except Exception as e:
|
||||
self.last_error = str(e)
|
||||
logger.error("Failed to retrieve events from Nostr: %s", e)
|
||||
if attempt < retries - 1:
|
||||
sleep_time = delay * (2**attempt)
|
||||
time.sleep(sleep_time)
|
||||
return None
|
||||
|
||||
async def _retrieve_json_from_nostr(self) -> Optional[bytes]:
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
await self._connect_async()
|
||||
# Filter for the latest text note (Kind 1) from our public key
|
||||
pubkey = self.keys.public_key()
|
||||
f = Filter().author(pubkey).kind(Kind.from_std(KindStandard.TEXT_NOTE)).limit(1)
|
||||
|
||||
timeout = timedelta(seconds=10)
|
||||
events = (await self.client.fetch_events(f, timeout)).to_vec()
|
||||
|
||||
if not events:
|
||||
self.last_error = "No events found on relays for this user."
|
||||
logger.warning(self.last_error)
|
||||
return None
|
||||
|
||||
latest_event = events[0]
|
||||
content_b64 = latest_event.content()
|
||||
|
||||
if content_b64:
|
||||
return base64.b64decode(content_b64.encode("utf-8"))
|
||||
self.last_error = "Latest event contained no content"
|
||||
return None
|
||||
|
||||
async def publish_snapshot(
|
||||
self, encrypted_bytes: bytes, limit: int = 50_000
|
||||
) -> tuple[Manifest, str]:
|
||||
"""Publish a compressed snapshot split into chunks.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
encrypted_bytes : bytes
|
||||
Vault contents already encrypted with the user's key.
|
||||
limit : int, optional
|
||||
Maximum chunk size in bytes. Defaults to 50 kB.
|
||||
"""
|
||||
|
||||
start = time.perf_counter()
|
||||
if self.offline_mode or not self.relays:
|
||||
return Manifest(ver=1, algo="gzip", chunks=[]), ""
|
||||
await self.ensure_manifest_is_current()
|
||||
await self._connect_async()
|
||||
manifest, chunks = prepare_snapshot(encrypted_bytes, limit)
|
||||
for meta, chunk in zip(manifest.chunks, chunks):
|
||||
content = base64.b64encode(chunk).decode("utf-8")
|
||||
builder = EventBuilder(Kind(KIND_SNAPSHOT_CHUNK), content).tags(
|
||||
[Tag.identifier(meta.id)]
|
||||
)
|
||||
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
|
||||
result = await self.client.send_event(event)
|
||||
try:
|
||||
meta.event_id = (
|
||||
result.id.to_hex() if hasattr(result, "id") else str(result)
|
||||
)
|
||||
except Exception:
|
||||
meta.event_id = None
|
||||
|
||||
manifest_json = json.dumps(
|
||||
{
|
||||
"ver": manifest.ver,
|
||||
"algo": manifest.algo,
|
||||
"chunks": [meta.__dict__ for meta in manifest.chunks],
|
||||
"delta_since": manifest.delta_since,
|
||||
}
|
||||
)
|
||||
|
||||
manifest_identifier = f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
|
||||
manifest_event = (
|
||||
EventBuilder(Kind(KIND_MANIFEST), manifest_json)
|
||||
.tags([Tag.identifier(manifest_identifier)])
|
||||
.build(self.keys.public_key())
|
||||
.sign_with_keys(self.keys)
|
||||
)
|
||||
await self.client.send_event(manifest_event)
|
||||
with self._state_lock:
|
||||
self.current_manifest = manifest
|
||||
self.current_manifest_id = manifest_identifier
|
||||
# Record when this snapshot was published for future delta events
|
||||
self.current_manifest.delta_since = int(time.time())
|
||||
self._delta_events = []
|
||||
if getattr(self, "verbose_timing", False):
|
||||
duration = time.perf_counter() - start
|
||||
logger.info("publish_snapshot completed in %.2f seconds", duration)
|
||||
return manifest, manifest_identifier
|
||||
|
||||
async def _fetch_chunks_with_retry(
|
||||
self, manifest_event
|
||||
) -> tuple[Manifest, list[bytes]] | None:
|
||||
"""Retrieve all chunks referenced by ``manifest_event`` with retries."""
|
||||
|
||||
pubkey = self.keys.public_key()
|
||||
timeout = timedelta(seconds=10)
|
||||
|
||||
try:
|
||||
data = json.loads(manifest_event.content())
|
||||
manifest = Manifest(
|
||||
ver=data["ver"],
|
||||
algo=data["algo"],
|
||||
chunks=[ChunkMeta(**c) for c in data["chunks"]],
|
||||
delta_since=(
|
||||
int(data["delta_since"])
|
||||
if data.get("delta_since") is not None
|
||||
else None
|
||||
),
|
||||
)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
if self.config_manager is None:
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
from seedpass.core.vault import Vault
|
||||
|
||||
cfg_mgr = ConfigManager(
|
||||
Vault(self.encryption_manager, self.fingerprint_dir),
|
||||
self.fingerprint_dir,
|
||||
)
|
||||
else:
|
||||
cfg_mgr = self.config_manager
|
||||
cfg = cfg_mgr.load_config(require_pin=False)
|
||||
max_retries = int(cfg.get("nostr_max_retries", MAX_RETRIES))
|
||||
delay = float(cfg.get("nostr_retry_delay", RETRY_DELAY))
|
||||
|
||||
chunks: list[bytes] = []
|
||||
for meta in manifest.chunks:
|
||||
chunk_bytes: bytes | None = None
|
||||
for attempt in range(max_retries):
|
||||
cf = Filter().author(pubkey).kind(Kind(KIND_SNAPSHOT_CHUNK))
|
||||
if meta.event_id:
|
||||
cf = cf.id(EventId.parse(meta.event_id))
|
||||
else:
|
||||
cf = cf.identifier(meta.id)
|
||||
cf = cf.limit(1)
|
||||
cev = (await self.client.fetch_events(cf, timeout)).to_vec()
|
||||
if cev:
|
||||
candidate = base64.b64decode(cev[0].content().encode("utf-8"))
|
||||
if hashlib.sha256(candidate).hexdigest() == meta.hash:
|
||||
chunk_bytes = candidate
|
||||
break
|
||||
if attempt < max_retries - 1:
|
||||
await asyncio.sleep(delay * (2**attempt))
|
||||
if chunk_bytes is None:
|
||||
return None
|
||||
chunks.append(chunk_bytes)
|
||||
|
||||
ident = None
|
||||
try:
|
||||
tags_obj = manifest_event.tags()
|
||||
ident = tags_obj.identifier()
|
||||
except Exception:
|
||||
tags = getattr(manifest_event, "tags", None)
|
||||
if callable(tags):
|
||||
tags = tags()
|
||||
if tags:
|
||||
tag = tags[0]
|
||||
if hasattr(tag, "as_vec"):
|
||||
vec = tag.as_vec()
|
||||
if vec and len(vec) >= 2:
|
||||
ident = vec[1]
|
||||
elif isinstance(tag, (list, tuple)) and len(tag) >= 2:
|
||||
ident = tag[1]
|
||||
elif isinstance(tag, str):
|
||||
ident = tag
|
||||
with self._state_lock:
|
||||
self.current_manifest = manifest
|
||||
self.current_manifest_id = ident
|
||||
return manifest, chunks
|
||||
|
||||
async def fetch_latest_snapshot(self) -> Tuple[Manifest, list[bytes]] | None:
|
||||
"""Retrieve the latest manifest and all snapshot chunks."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
await self._connect_async()
|
||||
|
||||
self.last_error = None
|
||||
pubkey = self.keys.public_key()
|
||||
ident = f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
|
||||
f = Filter().author(pubkey).kind(Kind(KIND_MANIFEST)).identifier(ident).limit(1)
|
||||
timeout = timedelta(seconds=10)
|
||||
try:
|
||||
events = (await self.client.fetch_events(f, timeout)).to_vec()
|
||||
except Exception as e: # pragma: no cover - network errors
|
||||
self.last_error = str(e)
|
||||
logger.error(
|
||||
"Failed to fetch manifest from relays %s: %s",
|
||||
self.relays,
|
||||
e,
|
||||
)
|
||||
return None
|
||||
|
||||
if not events:
|
||||
return None
|
||||
|
||||
for manifest_event in events:
|
||||
try:
|
||||
result = await self._fetch_chunks_with_retry(manifest_event)
|
||||
if result is not None:
|
||||
return result
|
||||
except Exception as e: # pragma: no cover - network errors
|
||||
self.last_error = str(e)
|
||||
logger.error(
|
||||
"Error retrieving snapshot from relays %s: %s",
|
||||
self.relays,
|
||||
e,
|
||||
)
|
||||
|
||||
if self.last_error is None:
|
||||
self.last_error = "Snapshot not found on relays"
|
||||
|
||||
return None
|
||||
|
||||
async def ensure_manifest_is_current(self) -> None:
|
||||
"""Verify the local manifest is up to date before publishing."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return
|
||||
await self._connect_async()
|
||||
pubkey = self.keys.public_key()
|
||||
ident = f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
|
||||
f = Filter().author(pubkey).kind(Kind(KIND_MANIFEST)).identifier(ident).limit(1)
|
||||
timeout = timedelta(seconds=10)
|
||||
try:
|
||||
events = (await self.client.fetch_events(f, timeout)).to_vec()
|
||||
except Exception:
|
||||
return
|
||||
if not events:
|
||||
return
|
||||
try:
|
||||
data = json.loads(events[0].content())
|
||||
remote = data.get("delta_since")
|
||||
if remote is not None:
|
||||
remote = int(remote)
|
||||
except Exception:
|
||||
return
|
||||
with self._state_lock:
|
||||
local = self.current_manifest.delta_since if self.current_manifest else None
|
||||
if remote is not None and (local is None or remote > local):
|
||||
self.last_error = "Manifest out of date"
|
||||
raise RuntimeError("Manifest out of date")
|
||||
|
||||
async def publish_delta(self, delta_bytes: bytes, manifest_id: str) -> str:
|
||||
"""Publish a delta event referencing a manifest."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return ""
|
||||
await self.ensure_manifest_is_current()
|
||||
await self._connect_async()
|
||||
|
||||
content = base64.b64encode(delta_bytes).decode("utf-8")
|
||||
tag = Tag.event(EventId.parse(manifest_id))
|
||||
builder = EventBuilder(Kind(KIND_DELTA), content).tags([tag])
|
||||
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
|
||||
result = await self.client.send_event(event)
|
||||
delta_id = result.id.to_hex() if hasattr(result, "id") else str(result)
|
||||
created_at = getattr(
|
||||
event, "created_at", getattr(event, "timestamp", int(time.time()))
|
||||
)
|
||||
if hasattr(created_at, "secs"):
|
||||
created_at = created_at.secs
|
||||
manifest_event = None
|
||||
with self._state_lock:
|
||||
if self.current_manifest is not None:
|
||||
self.current_manifest.delta_since = int(created_at)
|
||||
manifest_json = json.dumps(
|
||||
{
|
||||
"ver": self.current_manifest.ver,
|
||||
"algo": self.current_manifest.algo,
|
||||
"chunks": [
|
||||
meta.__dict__ for meta in self.current_manifest.chunks
|
||||
],
|
||||
"delta_since": self.current_manifest.delta_since,
|
||||
}
|
||||
)
|
||||
manifest_event = (
|
||||
EventBuilder(Kind(KIND_MANIFEST), manifest_json)
|
||||
.tags([Tag.identifier(self.current_manifest_id)])
|
||||
.build(self.keys.public_key())
|
||||
.sign_with_keys(self.keys)
|
||||
)
|
||||
self._delta_events.append(delta_id)
|
||||
if manifest_event is not None:
|
||||
await self.client.send_event(manifest_event)
|
||||
return delta_id
|
||||
|
||||
async def fetch_deltas_since(self, version: int) -> list[bytes]:
|
||||
"""Retrieve delta events newer than the given version."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return []
|
||||
await self._connect_async()
|
||||
|
||||
pubkey = self.keys.public_key()
|
||||
f = (
|
||||
Filter()
|
||||
.author(pubkey)
|
||||
.kind(Kind(KIND_DELTA))
|
||||
.since(Timestamp.from_secs(version))
|
||||
)
|
||||
timeout = timedelta(seconds=10)
|
||||
events = (await self.client.fetch_events(f, timeout)).to_vec()
|
||||
events.sort(
|
||||
key=lambda ev: getattr(ev, "created_at", getattr(ev, "timestamp", 0))
|
||||
)
|
||||
deltas: list[bytes] = []
|
||||
for ev in events:
|
||||
deltas.append(base64.b64decode(ev.content().encode("utf-8")))
|
||||
|
||||
manifest = self.get_current_manifest()
|
||||
if manifest is not None:
|
||||
snap_size = sum(c.size for c in manifest.chunks)
|
||||
if (
|
||||
len(deltas) >= self.delta_threshold
|
||||
or sum(len(d) for d in deltas) > snap_size
|
||||
):
|
||||
# Publish a new snapshot to consolidate deltas
|
||||
joined = b"".join(deltas)
|
||||
await self.publish_snapshot(joined)
|
||||
exp = Timestamp.from_secs(int(time.time()))
|
||||
for ev in events:
|
||||
exp_builder = EventBuilder(Kind(KIND_DELTA), ev.content()).tags(
|
||||
[Tag.expiration(exp)]
|
||||
)
|
||||
exp_event = exp_builder.build(
|
||||
self.keys.public_key()
|
||||
).sign_with_keys(self.keys)
|
||||
await self.client.send_event(exp_event)
|
||||
return deltas
|
||||
|
||||
def get_current_manifest(self) -> Manifest | None:
|
||||
"""Thread-safe access to ``current_manifest``."""
|
||||
with self._state_lock:
|
||||
return self.current_manifest
|
||||
|
||||
def get_current_manifest_id(self) -> str | None:
|
||||
"""Thread-safe access to ``current_manifest_id``."""
|
||||
with self._state_lock:
|
||||
return self.current_manifest_id
|
||||
|
||||
def get_delta_events(self) -> list[str]:
|
||||
"""Thread-safe snapshot of pending delta event IDs."""
|
||||
with self._state_lock:
|
||||
return list(self._delta_events)
|
||||
|
||||
def close_client_pool(self) -> None:
|
||||
"""Disconnects the client from all relays."""
|
||||
try:
|
||||
asyncio.run(self.client.disconnect())
|
||||
self._connected = False
|
||||
logger.info("NostrClient disconnected from relays.")
|
||||
except Exception as e:
|
||||
logger.error("Error during NostrClient shutdown: %s", e)
|
||||
__all__ = [
|
||||
"NostrClient",
|
||||
"prepare_snapshot",
|
||||
"DEFAULT_RELAYS",
|
||||
"MANIFEST_ID_PREFIX",
|
||||
]
|
||||
|
@@ -27,7 +27,8 @@ class Keys:
|
||||
|
||||
@staticmethod
|
||||
def hex_to_bech32(key_str: str, prefix: str = "npub") -> str:
|
||||
data = convertbits(bytes.fromhex(key_str), 8, 5)
|
||||
# Pad to align with 5-bit groups as expected for Bech32 encoding
|
||||
data = convertbits(bytes.fromhex(key_str), 8, 5, True)
|
||||
return bech32_encode(prefix, data)
|
||||
|
||||
@staticmethod
|
||||
|
232
src/nostr/connection.py
Normal file
232
src/nostr/connection.py
Normal file
@@ -0,0 +1,232 @@
|
||||
import asyncio
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import List, Optional
|
||||
|
||||
import websockets
|
||||
from . import client as nostr_client
|
||||
from constants import MAX_RETRIES, RETRY_DELAY
|
||||
|
||||
logger = logging.getLogger("nostr.client")
|
||||
logger.setLevel(logging.WARNING)
|
||||
|
||||
DEFAULT_RELAYS = [
|
||||
"wss://relay.snort.social",
|
||||
"wss://nostr.oxtr.dev",
|
||||
"wss://relay.primal.net",
|
||||
]
|
||||
|
||||
|
||||
class ConnectionHandler:
|
||||
"""Mixin providing relay connection and retry logic."""
|
||||
|
||||
async def connect(self) -> None:
|
||||
"""Connect the client to all configured relays."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return
|
||||
if not getattr(self, "_connected", False):
|
||||
await self._initialize_client_pool()
|
||||
|
||||
def initialize_client_pool(self) -> None:
|
||||
"""Add relays to the client and connect."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return
|
||||
asyncio.run(self._initialize_client_pool())
|
||||
|
||||
async def _connect_async(self) -> None:
|
||||
"""Ensure the client is connected within an async context."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return
|
||||
if not getattr(self, "_connected", False):
|
||||
await self._initialize_client_pool()
|
||||
|
||||
async def _initialize_client_pool(self) -> None:
|
||||
if self.offline_mode or not self.relays:
|
||||
return
|
||||
|
||||
formatted = []
|
||||
for relay in self.relays:
|
||||
if isinstance(relay, str):
|
||||
try:
|
||||
formatted.append(nostr_client.RelayUrl.parse(relay))
|
||||
except Exception:
|
||||
logger.error("Invalid relay URL: %s", relay)
|
||||
else:
|
||||
formatted.append(relay)
|
||||
|
||||
if hasattr(self.client, "add_relays"):
|
||||
await self.client.add_relays(formatted)
|
||||
else:
|
||||
for relay in formatted:
|
||||
await self.client.add_relay(relay)
|
||||
|
||||
await self.client.connect()
|
||||
self._connected = True
|
||||
logger.info("NostrClient connected to relays: %s", formatted)
|
||||
|
||||
async def _ping_relay(self, relay: str, timeout: float) -> bool:
|
||||
"""Attempt to retrieve the latest event from a single relay."""
|
||||
sub_id = "seedpass-health"
|
||||
pubkey = self.keys.public_key().to_hex()
|
||||
req = json.dumps(
|
||||
[
|
||||
"REQ",
|
||||
sub_id,
|
||||
{"kinds": [1], "authors": [pubkey], "limit": 1},
|
||||
]
|
||||
)
|
||||
try:
|
||||
async with websockets.connect(
|
||||
relay, open_timeout=timeout, close_timeout=timeout
|
||||
) as ws:
|
||||
await ws.send(req)
|
||||
while True:
|
||||
msg = await asyncio.wait_for(ws.recv(), timeout=timeout)
|
||||
data = json.loads(msg)
|
||||
if data[0] in {"EVENT", "EOSE"}:
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
async def _check_relay_health(self, min_relays: int, timeout: float) -> int:
|
||||
tasks = [self._ping_relay(r, timeout) for r in self.relays]
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
healthy = sum(1 for r in results if r is True)
|
||||
if healthy < min_relays:
|
||||
logger.warning(
|
||||
"Only %s relays responded with data; consider adding more.", healthy
|
||||
)
|
||||
return healthy
|
||||
|
||||
def check_relay_health(self, min_relays: int = 2, timeout: float = 5.0) -> int:
|
||||
"""Ping relays and return the count of those providing data."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return 0
|
||||
return asyncio.run(self._check_relay_health(min_relays, timeout))
|
||||
|
||||
async def publish_json_to_nostr(
|
||||
self,
|
||||
encrypted_json: bytes,
|
||||
to_pubkey: str | None = None,
|
||||
alt_summary: str | None = None,
|
||||
) -> str | None:
|
||||
"""Build and publish a Kind 1 text note or direct message."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
await self.connect()
|
||||
self.last_error = None
|
||||
try:
|
||||
content = base64.b64encode(encrypted_json).decode("utf-8")
|
||||
|
||||
if to_pubkey:
|
||||
receiver = nostr_client.PublicKey.parse(to_pubkey)
|
||||
event_output = self.client.send_private_msg_to(
|
||||
self.relays, receiver, content
|
||||
)
|
||||
else:
|
||||
builder = nostr_client.EventBuilder.text_note(content)
|
||||
if alt_summary:
|
||||
builder = builder.tags([nostr_client.Tag.alt(alt_summary)])
|
||||
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
|
||||
event_output = await self.publish_event(event)
|
||||
|
||||
event_id_hex = (
|
||||
event_output.id.to_hex()
|
||||
if hasattr(event_output, "id")
|
||||
else str(event_output)
|
||||
)
|
||||
logger.info("Successfully published event with ID: %s", event_id_hex)
|
||||
return event_id_hex
|
||||
|
||||
except Exception as e:
|
||||
self.last_error = str(e)
|
||||
logger.error("Failed to publish JSON to Nostr: %s", e)
|
||||
return None
|
||||
|
||||
async def publish_event(self, event):
|
||||
"""Publish a prepared event to the configured relays."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
await self.connect()
|
||||
return await self.client.send_event(event)
|
||||
|
||||
def update_relays(self, new_relays: List[str]) -> None:
|
||||
"""Reconnect the client using a new set of relays."""
|
||||
self.close_client_pool()
|
||||
self.relays = new_relays
|
||||
signer = nostr_client.NostrSigner.keys(self.keys)
|
||||
self.client = nostr_client.Client(signer)
|
||||
self._connected = False
|
||||
self.initialize_client_pool()
|
||||
|
||||
async def retrieve_json_from_nostr(
|
||||
self, retries: int | None = None, delay: float | None = None
|
||||
) -> Optional[bytes]:
|
||||
"""Retrieve the latest Kind 1 event from the author with optional retries."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
|
||||
if retries is None or delay is None:
|
||||
if self.config_manager is None:
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
from seedpass.core.vault import Vault
|
||||
|
||||
cfg_mgr = ConfigManager(
|
||||
Vault(self.encryption_manager, self.fingerprint_dir),
|
||||
self.fingerprint_dir,
|
||||
)
|
||||
else:
|
||||
cfg_mgr = self.config_manager
|
||||
cfg = cfg_mgr.load_config(require_pin=False)
|
||||
retries = int(cfg.get("nostr_max_retries", MAX_RETRIES))
|
||||
delay = float(cfg.get("nostr_retry_delay", RETRY_DELAY))
|
||||
|
||||
await self.connect()
|
||||
self.last_error = None
|
||||
for attempt in range(retries):
|
||||
try:
|
||||
result = await self._retrieve_json_from_nostr()
|
||||
if result is not None:
|
||||
return result
|
||||
except Exception as e:
|
||||
self.last_error = str(e)
|
||||
logger.error("Failed to retrieve events from Nostr: %s", e)
|
||||
if attempt < retries - 1:
|
||||
sleep_time = delay * (2**attempt)
|
||||
await asyncio.sleep(sleep_time)
|
||||
return None
|
||||
|
||||
async def _retrieve_json_from_nostr(self) -> Optional[bytes]:
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
await self._connect_async()
|
||||
pubkey = self.keys.public_key()
|
||||
f = (
|
||||
nostr_client.Filter()
|
||||
.author(pubkey)
|
||||
.kind(nostr_client.Kind.from_std(nostr_client.KindStandard.TEXT_NOTE))
|
||||
.limit(1)
|
||||
)
|
||||
timeout = timedelta(seconds=10)
|
||||
events = (await self.client.fetch_events(f, timeout)).to_vec()
|
||||
if not events:
|
||||
self.last_error = "No events found on relays for this user."
|
||||
logger.warning(self.last_error)
|
||||
return None
|
||||
latest_event = events[0]
|
||||
content_b64 = latest_event.content()
|
||||
if content_b64:
|
||||
return base64.b64decode(content_b64.encode("utf-8"))
|
||||
self.last_error = "Latest event contained no content"
|
||||
return None
|
||||
|
||||
def close_client_pool(self) -> None:
|
||||
"""Disconnect the client from all relays."""
|
||||
try:
|
||||
asyncio.run(self.client.disconnect())
|
||||
self._connected = False
|
||||
logger.info("NostrClient disconnected from relays.")
|
||||
except Exception as e:
|
||||
logger.error("Error during NostrClient shutdown: %s", e)
|
@@ -2,16 +2,8 @@
|
||||
|
||||
import time
|
||||
import logging
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from monstr.event.event import Event
|
||||
except ImportError: # pragma: no cover - optional dependency
|
||||
|
||||
class Event: # minimal placeholder for type hints when monstr is absent
|
||||
id: str
|
||||
created_at: int
|
||||
content: str
|
||||
from nostr_sdk import Event
|
||||
|
||||
|
||||
# Instantiate the logger
|
||||
@@ -27,26 +19,15 @@ class EventHandler:
|
||||
pass # Initialize if needed
|
||||
|
||||
def handle_new_event(self, evt: Event):
|
||||
"""
|
||||
Processes incoming events by logging their details.
|
||||
"""Process and log details from a Nostr event."""
|
||||
|
||||
:param evt: The received Event object.
|
||||
"""
|
||||
try:
|
||||
# Assuming evt.created_at is always an integer Unix timestamp
|
||||
if isinstance(evt.created_at, int):
|
||||
created_at_str = time.strftime(
|
||||
"%Y-%m-%d %H:%M:%S", time.gmtime(evt.created_at)
|
||||
)
|
||||
else:
|
||||
# Handle unexpected types gracefully
|
||||
created_at_str = str(evt.created_at)
|
||||
created_at = evt.created_at().as_secs()
|
||||
created_at_str = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(created_at))
|
||||
event_id = evt.id().to_hex()
|
||||
|
||||
# Log the event details without extra newlines
|
||||
logger.info(
|
||||
f"[New Event] ID: {evt.id} | Created At: {created_at_str} | Content: {evt.content}"
|
||||
f"[New Event] ID: {event_id} | Created At: {created_at_str} | Content: {evt.content()}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error handling new event: {e}", exc_info=True)
|
||||
# Optionally, handle the exception without re-raising
|
||||
# For example, continue processing other events
|
||||
|
@@ -2,13 +2,16 @@
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import traceback
|
||||
from bech32 import bech32_encode, convertbits
|
||||
|
||||
from local_bip85.bip85 import BIP85
|
||||
from bip_utils import Bip39SeedGenerator
|
||||
from .coincurve_keys import Keys
|
||||
|
||||
# BIP-85 application numbers for Nostr key derivation
|
||||
NOSTR_KEY_APP_ID = 1237
|
||||
LEGACY_NOSTR_KEY_APP_ID = 0
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -82,7 +85,8 @@ class KeyManager:
|
||||
# Derive entropy for Nostr key (32 bytes)
|
||||
entropy_bytes = self.bip85.derive_entropy(
|
||||
index=index,
|
||||
bytes_len=32, # Adjust parameter name and value as per your method signature
|
||||
entropy_bytes=32,
|
||||
app_no=NOSTR_KEY_APP_ID,
|
||||
)
|
||||
|
||||
# Generate Nostr key pair from entropy
|
||||
@@ -94,6 +98,17 @@ class KeyManager:
|
||||
logger.error(f"Failed to generate Nostr keys: {e}", exc_info=True)
|
||||
raise
|
||||
|
||||
def generate_legacy_nostr_keys(self) -> Keys:
|
||||
"""Derive Nostr keys using the legacy application ID."""
|
||||
try:
|
||||
entropy = self.bip85.derive_entropy(
|
||||
index=0, entropy_bytes=32, app_no=LEGACY_NOSTR_KEY_APP_ID
|
||||
)
|
||||
return Keys(priv_k=entropy.hex())
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to generate legacy Nostr keys: {e}", exc_info=True)
|
||||
raise
|
||||
|
||||
def get_public_key_hex(self) -> str:
|
||||
"""
|
||||
Returns the public key in hexadecimal format.
|
||||
|
@@ -1,41 +0,0 @@
|
||||
# nostr/logging_config.py
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
# Comment out or remove the configure_logging function to avoid conflicts
|
||||
# def configure_logging():
|
||||
# """
|
||||
# Configures logging with both file and console handlers.
|
||||
# Logs include the timestamp, log level, message, filename, and line number.
|
||||
# Only ERROR and higher-level messages are shown in the terminal, while all messages
|
||||
# are logged in the log file.
|
||||
# """
|
||||
# logger = logging.getLogger()
|
||||
# logger.setLevel(logging.DEBUG) # Set root logger to DEBUG
|
||||
#
|
||||
# # Prevent adding multiple handlers if configure_logging is called multiple times
|
||||
# if not logger.handlers:
|
||||
# # Create the 'logs' folder if it doesn't exist
|
||||
# log_directory = 'logs'
|
||||
# if not os.path.exists(log_directory):
|
||||
# os.makedirs(log_directory)
|
||||
#
|
||||
# # Create handlers
|
||||
# c_handler = logging.StreamHandler()
|
||||
# f_handler = logging.FileHandler(os.path.join(log_directory, 'app.log'))
|
||||
#
|
||||
# # Set levels: only errors and critical messages will be shown in the console
|
||||
# c_handler.setLevel(logging.ERROR)
|
||||
# f_handler.setLevel(logging.DEBUG)
|
||||
#
|
||||
# # Create formatters and add them to handlers, include file and line number in log messages
|
||||
# formatter = logging.Formatter(
|
||||
# '%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]'
|
||||
# )
|
||||
# c_handler.setFormatter(formatter)
|
||||
# f_handler.setFormatter(formatter)
|
||||
#
|
||||
# # Add handlers to the logger
|
||||
# logger.addHandler(c_handler)
|
||||
# logger.addHandler(f_handler)
|
425
src/nostr/snapshot.py
Normal file
425
src/nostr/snapshot.py
Normal file
@@ -0,0 +1,425 @@
|
||||
import asyncio
|
||||
import base64
|
||||
import gzip
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
from datetime import timedelta
|
||||
from typing import Tuple
|
||||
|
||||
from . import client as nostr_client
|
||||
|
||||
from constants import MAX_RETRIES, RETRY_DELAY
|
||||
|
||||
from .backup_models import (
|
||||
ChunkMeta,
|
||||
Manifest,
|
||||
KIND_DELTA,
|
||||
KIND_MANIFEST,
|
||||
KIND_SNAPSHOT_CHUNK,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("nostr.client")
|
||||
logger.setLevel(logging.WARNING)
|
||||
|
||||
# Identifier prefix for replaceable manifest events
|
||||
MANIFEST_ID_PREFIX = "seedpass-manifest-"
|
||||
|
||||
|
||||
def prepare_snapshot(
|
||||
encrypted_bytes: bytes, limit: int
|
||||
) -> Tuple[Manifest, list[bytes]]:
|
||||
"""Compress and split the encrypted vault into chunks."""
|
||||
compressed = gzip.compress(encrypted_bytes)
|
||||
chunks = [compressed[i : i + limit] for i in range(0, len(compressed), limit)]
|
||||
metas: list[ChunkMeta] = []
|
||||
for i, chunk in enumerate(chunks):
|
||||
metas.append(
|
||||
ChunkMeta(
|
||||
id=f"seedpass-chunk-{i:04d}",
|
||||
size=len(chunk),
|
||||
hash=hashlib.sha256(chunk).hexdigest(),
|
||||
event_id=None,
|
||||
)
|
||||
)
|
||||
manifest = Manifest(ver=1, algo="gzip", chunks=metas)
|
||||
return manifest, chunks
|
||||
|
||||
|
||||
class SnapshotHandler:
|
||||
"""Mixin providing chunk and manifest handling."""
|
||||
|
||||
async def publish_snapshot(
|
||||
self, encrypted_bytes: bytes, limit: int = 50_000
|
||||
) -> tuple[Manifest, str]:
|
||||
start = time.perf_counter()
|
||||
if self.offline_mode or not self.relays:
|
||||
return Manifest(ver=1, algo="gzip", chunks=[]), ""
|
||||
await self.ensure_manifest_is_current()
|
||||
await self._connect_async()
|
||||
manifest, chunks = prepare_snapshot(encrypted_bytes, limit)
|
||||
|
||||
existing: dict[str, str] = {}
|
||||
if self.current_manifest:
|
||||
for old in self.current_manifest.chunks:
|
||||
if old.hash and old.event_id:
|
||||
existing[old.hash] = old.event_id
|
||||
|
||||
for meta, chunk in zip(manifest.chunks, chunks):
|
||||
cached_id = existing.get(meta.hash)
|
||||
if cached_id:
|
||||
meta.event_id = cached_id
|
||||
continue
|
||||
content = base64.b64encode(chunk).decode("utf-8")
|
||||
builder = nostr_client.EventBuilder(
|
||||
nostr_client.Kind(KIND_SNAPSHOT_CHUNK), content
|
||||
).tags([nostr_client.Tag.identifier(meta.id)])
|
||||
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
|
||||
result = await self.client.send_event(event)
|
||||
try:
|
||||
meta.event_id = (
|
||||
result.id.to_hex() if hasattr(result, "id") else str(result)
|
||||
)
|
||||
except Exception:
|
||||
meta.event_id = None
|
||||
|
||||
manifest_json = json.dumps(
|
||||
{
|
||||
"ver": manifest.ver,
|
||||
"algo": manifest.algo,
|
||||
"chunks": [meta.__dict__ for meta in manifest.chunks],
|
||||
"delta_since": manifest.delta_since,
|
||||
}
|
||||
)
|
||||
|
||||
manifest_identifier = (
|
||||
self.current_manifest_id or f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
|
||||
)
|
||||
manifest_event = (
|
||||
nostr_client.EventBuilder(nostr_client.Kind(KIND_MANIFEST), manifest_json)
|
||||
.tags([nostr_client.Tag.identifier(manifest_identifier)])
|
||||
.build(self.keys.public_key())
|
||||
.sign_with_keys(self.keys)
|
||||
)
|
||||
await self.client.send_event(manifest_event)
|
||||
with self._state_lock:
|
||||
self.current_manifest = manifest
|
||||
self.current_manifest_id = manifest_identifier
|
||||
self.current_manifest.delta_since = int(time.time())
|
||||
self._delta_events = []
|
||||
if getattr(self, "verbose_timing", False):
|
||||
duration = time.perf_counter() - start
|
||||
logger.info("publish_snapshot completed in %.2f seconds", duration)
|
||||
return manifest, manifest_identifier
|
||||
|
||||
async def _fetch_chunks_with_retry(
|
||||
self, manifest_event
|
||||
) -> tuple[Manifest, list[bytes]] | None:
|
||||
pubkey = self.keys.public_key()
|
||||
timeout = timedelta(seconds=10)
|
||||
try:
|
||||
data = json.loads(manifest_event.content())
|
||||
manifest = Manifest(
|
||||
ver=data["ver"],
|
||||
algo=data["algo"],
|
||||
chunks=[ChunkMeta(**c) for c in data["chunks"]],
|
||||
delta_since=(
|
||||
int(data["delta_since"])
|
||||
if data.get("delta_since") is not None
|
||||
else None
|
||||
),
|
||||
)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
if self.config_manager is None:
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
from seedpass.core.vault import Vault
|
||||
|
||||
cfg_mgr = ConfigManager(
|
||||
Vault(self.encryption_manager, self.fingerprint_dir),
|
||||
self.fingerprint_dir,
|
||||
)
|
||||
else:
|
||||
cfg_mgr = self.config_manager
|
||||
cfg = cfg_mgr.load_config(require_pin=False)
|
||||
max_retries = int(cfg.get("nostr_max_retries", MAX_RETRIES))
|
||||
delay = float(cfg.get("nostr_retry_delay", RETRY_DELAY))
|
||||
|
||||
chunks: list[bytes] = []
|
||||
for meta in manifest.chunks:
|
||||
chunk_bytes: bytes | None = None
|
||||
for attempt in range(max_retries):
|
||||
cf = (
|
||||
nostr_client.Filter()
|
||||
.author(pubkey)
|
||||
.kind(nostr_client.Kind(KIND_SNAPSHOT_CHUNK))
|
||||
)
|
||||
if meta.event_id:
|
||||
cf = cf.id(nostr_client.EventId.parse(meta.event_id))
|
||||
else:
|
||||
cf = cf.identifier(meta.id)
|
||||
cf = cf.limit(1)
|
||||
cev = (await self.client.fetch_events(cf, timeout)).to_vec()
|
||||
if cev:
|
||||
candidate = base64.b64decode(cev[0].content().encode("utf-8"))
|
||||
if hashlib.sha256(candidate).hexdigest() == meta.hash:
|
||||
chunk_bytes = candidate
|
||||
break
|
||||
if attempt < max_retries - 1:
|
||||
await asyncio.sleep(delay * (2**attempt))
|
||||
if chunk_bytes is None:
|
||||
return None
|
||||
chunks.append(chunk_bytes)
|
||||
|
||||
ident = None
|
||||
try:
|
||||
tags_obj = manifest_event.tags()
|
||||
ident = tags_obj.identifier()
|
||||
except Exception:
|
||||
tags = getattr(manifest_event, "tags", None)
|
||||
if callable(tags):
|
||||
tags = tags()
|
||||
if tags:
|
||||
tag = tags[0]
|
||||
if hasattr(tag, "as_vec"):
|
||||
vec = tag.as_vec()
|
||||
if vec and len(vec) >= 2:
|
||||
ident = vec[1]
|
||||
elif isinstance(tag, (list, tuple)) and len(tag) >= 2:
|
||||
ident = tag[1]
|
||||
elif isinstance(tag, str):
|
||||
ident = tag
|
||||
with self._state_lock:
|
||||
self.current_manifest = manifest
|
||||
self.current_manifest_id = ident
|
||||
return manifest, chunks
|
||||
|
||||
async def _fetch_manifest_with_keys(
|
||||
self, keys_obj: nostr_client.Keys
|
||||
) -> tuple[Manifest, list[bytes]] | None:
|
||||
"""Retrieve the manifest and chunks using ``keys_obj``."""
|
||||
self.keys = keys_obj
|
||||
pubkey = self.keys.public_key()
|
||||
timeout = timedelta(seconds=10)
|
||||
|
||||
ident = f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
|
||||
f = (
|
||||
nostr_client.Filter()
|
||||
.author(pubkey)
|
||||
.kind(nostr_client.Kind(KIND_MANIFEST))
|
||||
.identifier(ident)
|
||||
.limit(1)
|
||||
)
|
||||
try:
|
||||
events = (await self.client.fetch_events(f, timeout)).to_vec()
|
||||
except Exception as e: # pragma: no cover - network errors
|
||||
self.last_error = str(e)
|
||||
logger.error(
|
||||
"Failed to fetch manifest from relays %s: %s",
|
||||
self.relays,
|
||||
e,
|
||||
)
|
||||
return None
|
||||
|
||||
if not events:
|
||||
ident = MANIFEST_ID_PREFIX.rstrip("-")
|
||||
f = (
|
||||
nostr_client.Filter()
|
||||
.author(pubkey)
|
||||
.kind(nostr_client.Kind(KIND_MANIFEST))
|
||||
.identifier(ident)
|
||||
.limit(1)
|
||||
)
|
||||
try:
|
||||
events = (await self.client.fetch_events(f, timeout)).to_vec()
|
||||
except Exception as e: # pragma: no cover - network errors
|
||||
self.last_error = str(e)
|
||||
logger.error(
|
||||
"Failed to fetch manifest from relays %s: %s",
|
||||
self.relays,
|
||||
e,
|
||||
)
|
||||
return None
|
||||
if not events:
|
||||
return None
|
||||
|
||||
logger.info("Fetched manifest using identifier %s", ident)
|
||||
|
||||
for manifest_event in events:
|
||||
try:
|
||||
result = await self._fetch_chunks_with_retry(manifest_event)
|
||||
if result is not None:
|
||||
return result
|
||||
except Exception as e: # pragma: no cover - network errors
|
||||
self.last_error = str(e)
|
||||
logger.error(
|
||||
"Error retrieving snapshot from relays %s: %s",
|
||||
self.relays,
|
||||
e,
|
||||
)
|
||||
return None
|
||||
|
||||
async def fetch_latest_snapshot(self) -> Tuple[Manifest, list[bytes]] | None:
|
||||
"""Retrieve the latest manifest and all snapshot chunks."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return None
|
||||
await self._connect_async()
|
||||
self.last_error = None
|
||||
logger.debug("Searching for backup with current keys...")
|
||||
try:
|
||||
primary_keys = nostr_client.Keys.parse(
|
||||
self.key_manager.keys.private_key_hex()
|
||||
)
|
||||
except Exception:
|
||||
primary_keys = self.keys
|
||||
result = await self._fetch_manifest_with_keys(primary_keys)
|
||||
if result is not None:
|
||||
return result
|
||||
logger.warning(
|
||||
"No backup found with current keys. Falling back to legacy key derivation..."
|
||||
)
|
||||
try:
|
||||
legacy_keys = self.key_manager.generate_legacy_nostr_keys()
|
||||
legacy_sdk_keys = nostr_client.Keys.parse(legacy_keys.private_key_hex())
|
||||
except Exception as e:
|
||||
self.last_error = str(e)
|
||||
return None
|
||||
result = await self._fetch_manifest_with_keys(legacy_sdk_keys)
|
||||
if result is not None:
|
||||
logger.info("Found legacy backup with old key derivation.")
|
||||
return result
|
||||
if self.last_error is None:
|
||||
self.last_error = "No backup found on Nostr relays."
|
||||
return None
|
||||
|
||||
async def ensure_manifest_is_current(self) -> None:
|
||||
"""Verify the local manifest is up to date before publishing."""
|
||||
if self.offline_mode or not self.relays:
|
||||
return
|
||||
await self._connect_async()
|
||||
pubkey = self.keys.public_key()
|
||||
ident = self.current_manifest_id or f"{MANIFEST_ID_PREFIX}{self.fingerprint}"
|
||||
f = (
|
||||
nostr_client.Filter()
|
||||
.author(pubkey)
|
||||
.kind(nostr_client.Kind(KIND_MANIFEST))
|
||||
.identifier(ident)
|
||||
.limit(1)
|
||||
)
|
||||
timeout = timedelta(seconds=10)
|
||||
try:
|
||||
events = (await self.client.fetch_events(f, timeout)).to_vec()
|
||||
except Exception:
|
||||
return
|
||||
if not events:
|
||||
return
|
||||
try:
|
||||
data = json.loads(events[0].content())
|
||||
remote = data.get("delta_since")
|
||||
if remote is not None:
|
||||
remote = int(remote)
|
||||
except Exception:
|
||||
return
|
||||
with self._state_lock:
|
||||
local = self.current_manifest.delta_since if self.current_manifest else None
|
||||
if remote is not None and (local is None or remote > local):
|
||||
self.last_error = "Manifest out of date"
|
||||
raise RuntimeError("Manifest out of date")
|
||||
|
||||
async def publish_delta(self, delta_bytes: bytes, manifest_id: str) -> str:
|
||||
if self.offline_mode or not self.relays:
|
||||
return ""
|
||||
await self.ensure_manifest_is_current()
|
||||
await self._connect_async()
|
||||
content = base64.b64encode(delta_bytes).decode("utf-8")
|
||||
tag = nostr_client.Tag.event(nostr_client.EventId.parse(manifest_id))
|
||||
builder = nostr_client.EventBuilder(
|
||||
nostr_client.Kind(KIND_DELTA), content
|
||||
).tags([tag])
|
||||
event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
|
||||
result = await self.client.send_event(event)
|
||||
delta_id = result.id.to_hex() if hasattr(result, "id") else str(result)
|
||||
created_at = getattr(
|
||||
event, "created_at", getattr(event, "timestamp", int(time.time()))
|
||||
)
|
||||
if hasattr(created_at, "secs"):
|
||||
created_at = created_at.secs
|
||||
manifest_event = None
|
||||
with self._state_lock:
|
||||
if self.current_manifest is not None:
|
||||
self.current_manifest.delta_since = int(created_at)
|
||||
manifest_json = json.dumps(
|
||||
{
|
||||
"ver": self.current_manifest.ver,
|
||||
"algo": self.current_manifest.algo,
|
||||
"chunks": [
|
||||
meta.__dict__ for meta in self.current_manifest.chunks
|
||||
],
|
||||
"delta_since": self.current_manifest.delta_since,
|
||||
}
|
||||
)
|
||||
manifest_event = (
|
||||
nostr_client.EventBuilder(
|
||||
nostr_client.Kind(KIND_MANIFEST), manifest_json
|
||||
)
|
||||
.tags([nostr_client.Tag.identifier(self.current_manifest_id)])
|
||||
.build(self.keys.public_key())
|
||||
.sign_with_keys(self.keys)
|
||||
)
|
||||
self._delta_events.append(delta_id)
|
||||
if manifest_event is not None:
|
||||
await self.client.send_event(manifest_event)
|
||||
return delta_id
|
||||
|
||||
async def fetch_deltas_since(self, version: int) -> list[bytes]:
|
||||
if self.offline_mode or not self.relays:
|
||||
return []
|
||||
await self._connect_async()
|
||||
pubkey = self.keys.public_key()
|
||||
f = (
|
||||
nostr_client.Filter()
|
||||
.author(pubkey)
|
||||
.kind(nostr_client.Kind(KIND_DELTA))
|
||||
.since(nostr_client.Timestamp.from_secs(version))
|
||||
)
|
||||
timeout = timedelta(seconds=10)
|
||||
events = (await self.client.fetch_events(f, timeout)).to_vec()
|
||||
events.sort(
|
||||
key=lambda ev: getattr(ev, "created_at", getattr(ev, "timestamp", 0))
|
||||
)
|
||||
deltas: list[bytes] = []
|
||||
for ev in events:
|
||||
deltas.append(base64.b64decode(ev.content().encode("utf-8")))
|
||||
manifest = self.get_current_manifest()
|
||||
if manifest is not None:
|
||||
snap_size = sum(c.size for c in manifest.chunks)
|
||||
if (
|
||||
len(deltas) >= self.delta_threshold
|
||||
or sum(len(d) for d in deltas) > snap_size
|
||||
):
|
||||
joined = b"".join(deltas)
|
||||
await self.publish_snapshot(joined)
|
||||
exp = nostr_client.Timestamp.from_secs(int(time.time()))
|
||||
for ev in events:
|
||||
exp_builder = nostr_client.EventBuilder(
|
||||
nostr_client.Kind(KIND_DELTA), ev.content()
|
||||
).tags([nostr_client.Tag.expiration(exp)])
|
||||
exp_event = exp_builder.build(
|
||||
self.keys.public_key()
|
||||
).sign_with_keys(self.keys)
|
||||
await self.client.send_event(exp_event)
|
||||
return deltas
|
||||
|
||||
def get_current_manifest(self) -> Manifest | None:
|
||||
with self._state_lock:
|
||||
return self.current_manifest
|
||||
|
||||
def get_current_manifest_id(self) -> str | None:
|
||||
with self._state_lock:
|
||||
return self.current_manifest_id
|
||||
|
||||
def get_delta_events(self) -> list[str]:
|
||||
with self._state_lock:
|
||||
return list(self._delta_events)
|
@@ -1,8 +0,0 @@
|
||||
# nostr/utils.py
|
||||
|
||||
import logging
|
||||
|
||||
|
||||
# Example utility function (if any specific to nostr package)
|
||||
def some_helper_function():
|
||||
pass # Implement as needed
|
@@ -28,7 +28,6 @@ Generated on: 2025-04-06
|
||||
├── encryption_manager.py
|
||||
├── event_handler.py
|
||||
├── key_manager.py
|
||||
├── logging_config.py
|
||||
├── utils.py
|
||||
├── utils/
|
||||
├── __init__.py
|
||||
@@ -3082,52 +3081,6 @@ __all__ = ['NostrClient']
|
||||
|
||||
```
|
||||
|
||||
## nostr/logging_config.py
|
||||
```python
|
||||
# nostr/logging_config.py
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
# Comment out or remove the configure_logging function to avoid conflicts
|
||||
# def configure_logging():
|
||||
# """
|
||||
# Configures logging with both file and console handlers.
|
||||
# Logs include the timestamp, log level, message, filename, and line number.
|
||||
# Only ERROR and higher-level messages are shown in the terminal, while all messages
|
||||
# are logged in the log file.
|
||||
# """
|
||||
# logger = logging.getLogger()
|
||||
# logger.setLevel(logging.DEBUG) # Set root logger to DEBUG
|
||||
#
|
||||
# # Prevent adding multiple handlers if configure_logging is called multiple times
|
||||
# if not logger.handlers:
|
||||
# # Create the 'logs' folder if it doesn't exist
|
||||
# log_directory = 'logs'
|
||||
# if not os.path.exists(log_directory):
|
||||
# os.makedirs(log_directory)
|
||||
#
|
||||
# # Create handlers
|
||||
# c_handler = logging.StreamHandler()
|
||||
# f_handler = logging.FileHandler(os.path.join(log_directory, 'app.log'))
|
||||
#
|
||||
# # Set levels: only errors and critical messages will be shown in the console
|
||||
# c_handler.setLevel(logging.ERROR)
|
||||
# f_handler.setLevel(logging.DEBUG)
|
||||
#
|
||||
# # Create formatters and add them to handlers, include file and line number in log messages
|
||||
# formatter = logging.Formatter(
|
||||
# '%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]'
|
||||
# )
|
||||
# c_handler.setFormatter(formatter)
|
||||
# f_handler.setFormatter(formatter)
|
||||
#
|
||||
# # Add handlers to the logger
|
||||
# logger.addHandler(c_handler)
|
||||
# logger.addHandler(f_handler)
|
||||
|
||||
```
|
||||
|
||||
## nostr/event_handler.py
|
||||
```python
|
||||
# nostr/event_handler.py
|
||||
|
@@ -1,38 +1,42 @@
|
||||
colorama>=0.4.6
|
||||
termcolor>=1.1.0
|
||||
cryptography>=40.0.2
|
||||
bip-utils>=2.5.0
|
||||
bech32==1.2.0
|
||||
coincurve>=18.0.0
|
||||
mnemonic
|
||||
aiohttp>=3.12.14
|
||||
bcrypt
|
||||
pytest>=7.0
|
||||
pytest-cov
|
||||
pytest-xdist
|
||||
portalocker>=2.8
|
||||
nostr-sdk>=0.43
|
||||
websocket-client==1.7.0
|
||||
colorama>=0.4.6,<1
|
||||
termcolor>=1.1.0,<4
|
||||
cryptography>=40.0.2,<46
|
||||
bip-utils>=2.5.0,<3
|
||||
bech32>=1.2,<2
|
||||
coincurve>=18.0.0,<22
|
||||
mnemonic>=0.21,<1
|
||||
aiohttp>=3.9,<4
|
||||
bcrypt>=4,<5
|
||||
pytest>=7,<9
|
||||
pytest-cov>=4,<7
|
||||
pytest-xdist>=3,<4
|
||||
portalocker>=2.8,<4
|
||||
nostr-sdk>=0.43,<1
|
||||
websocket-client>=1.7,<2
|
||||
|
||||
websockets>=15.0.0
|
||||
tomli
|
||||
hypothesis
|
||||
mutmut==2.4.4
|
||||
pgpy==0.6.0
|
||||
pyotp>=2.8.0
|
||||
websockets>=15,<16
|
||||
tomli>=2,<3
|
||||
hypothesis>=6,<7
|
||||
mutmut>=2.4.4,<4
|
||||
pgpy>=0.6,<1
|
||||
pyotp>=2.8,<3
|
||||
|
||||
freezegun
|
||||
pyperclip
|
||||
qrcode>=8.2
|
||||
typer>=0.12.3
|
||||
fastapi>=0.116.1
|
||||
uvicorn>=0.35.0
|
||||
starlette>=0.47.2
|
||||
httpx>=0.28.1
|
||||
requests>=2.32
|
||||
python-multipart
|
||||
orjson
|
||||
argon2-cffi
|
||||
toga-core>=0.5.2
|
||||
pillow
|
||||
toga-dummy>=0.5.2 # for headless GUI tests
|
||||
freezegun>=1.5.4,<2
|
||||
typer>=0.12.3,<1
|
||||
|
||||
# Optional dependencies - install as needed for additional features
|
||||
pyperclip>=1.9,<2 # Clipboard support for secret mode
|
||||
qrcode>=8.2,<9 # Generate QR codes for TOTP setup
|
||||
fastapi>=0.110,<1 # API server
|
||||
uvicorn>=0.29,<1 # API server
|
||||
starlette>=0.47.2,<1 # API server
|
||||
httpx>=0.28.1,<1 # API server
|
||||
requests>=2.32,<3 # API server
|
||||
python-multipart>=0.0.20,<0.1 # API server file uploads
|
||||
PyJWT>=2.10.1,<3 # JWT authentication for API server
|
||||
orjson>=3.11.1,<4 # Fast JSON serialization for API server
|
||||
argon2-cffi>=21,<26 # Password hashing for API server
|
||||
toga-core>=0.5.2,<1 # Desktop GUI
|
||||
pillow>=11.3,<12 # Image support for GUI
|
||||
toga-dummy>=0.5.2,<1 # Headless GUI tests
|
||||
slowapi>=0.1.9,<1 # Rate limiting for API server
|
||||
|
@@ -1,30 +1,35 @@
|
||||
# Runtime dependencies for vendoring/packaging only
|
||||
# Generated from requirements.txt with all test-only packages removed
|
||||
colorama>=0.4.6
|
||||
termcolor>=1.1.0
|
||||
cryptography>=40.0.2
|
||||
bip-utils>=2.5.0
|
||||
bech32==1.2.0
|
||||
coincurve>=18.0.0
|
||||
mnemonic
|
||||
aiohttp>=3.12.14
|
||||
bcrypt
|
||||
portalocker>=2.8
|
||||
nostr-sdk>=0.43
|
||||
websocket-client==1.7.0
|
||||
colorama>=0.4.6,<1
|
||||
termcolor>=1.1.0,<4
|
||||
cryptography>=40.0.2,<46
|
||||
bip-utils>=2.5.0,<3
|
||||
bech32>=1.2,<2
|
||||
coincurve>=18.0.0,<22
|
||||
mnemonic>=0.21,<1
|
||||
aiohttp>=3.9,<4
|
||||
bcrypt>=4,<5
|
||||
portalocker>=2.8,<4
|
||||
nostr-sdk>=0.43,<1
|
||||
websocket-client>=1.7,<2
|
||||
|
||||
websockets>=15.0.0
|
||||
tomli
|
||||
pgpy==0.6.0
|
||||
pyotp>=2.8.0
|
||||
pyperclip
|
||||
qrcode>=8.2
|
||||
typer>=0.12.3
|
||||
fastapi>=0.116.0
|
||||
uvicorn>=0.35.0
|
||||
httpx>=0.28.1
|
||||
requests>=2.32
|
||||
python-multipart
|
||||
orjson
|
||||
argon2-cffi
|
||||
toga-core>=0.5.2
|
||||
websockets>=15,<16
|
||||
tomli>=2,<3
|
||||
pgpy>=0.6,<1
|
||||
pyotp>=2.8,<3
|
||||
pyperclip>=1.9,<2
|
||||
qrcode>=8.2,<9
|
||||
typer>=0.12.3,<1
|
||||
fastapi>=0.110,<1
|
||||
uvicorn>=0.29,<1
|
||||
starlette>=0.47.2,<1
|
||||
httpx>=0.28.1,<1
|
||||
requests>=2.32,<3
|
||||
python-multipart>=0.0.20,<0.1
|
||||
PyJWT>=2.10.1,<3
|
||||
orjson>=3.11.1,<4
|
||||
argon2-cffi>=21,<26
|
||||
toga-core>=0.5.2,<1
|
||||
pillow>=11.3,<12
|
||||
toga-dummy>=0.5.2,<1
|
||||
slowapi>=0.1.9,<1
|
||||
|
@@ -3,4 +3,3 @@ selected_directories:
|
||||
- utils/
|
||||
- nostr/
|
||||
- local_bip85/
|
||||
- password_manager/
|
||||
|
@@ -0,0 +1,3 @@
|
||||
"""SeedPass package initialization."""
|
||||
|
||||
# Optionally re-export selected symbols here.
|
||||
|
@@ -9,56 +9,95 @@ import secrets
|
||||
import queue
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import jwt
|
||||
import logging
|
||||
|
||||
from fastapi import FastAPI, Header, HTTPException, Request, Response
|
||||
from fastapi.concurrency import run_in_threadpool
|
||||
import asyncio
|
||||
import sys
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
import hashlib
|
||||
import hmac
|
||||
|
||||
from slowapi import Limiter, _rate_limit_exceeded_handler
|
||||
from slowapi.errors import RateLimitExceeded
|
||||
from slowapi.util import get_remote_address
|
||||
from slowapi.middleware import SlowAPIMiddleware
|
||||
|
||||
from seedpass.core.manager import PasswordManager
|
||||
from seedpass.core.entry_types import EntryType
|
||||
from seedpass.core.api import UtilityService
|
||||
|
||||
|
||||
_RATE_LIMIT = int(os.getenv("SEEDPASS_RATE_LIMIT", "100"))
|
||||
_RATE_WINDOW = int(os.getenv("SEEDPASS_RATE_WINDOW", "60"))
|
||||
_RATE_LIMIT_STR = f"{_RATE_LIMIT}/{_RATE_WINDOW} seconds"
|
||||
|
||||
limiter = Limiter(key_func=get_remote_address, default_limits=[_RATE_LIMIT_STR])
|
||||
app = FastAPI()
|
||||
|
||||
_pm: Optional[PasswordManager] = None
|
||||
_token: str = ""
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _check_token(auth: str | None) -> None:
|
||||
if auth != f"Bearer {_token}":
|
||||
def _get_pm(request: Request) -> PasswordManager:
|
||||
pm = getattr(request.app.state, "pm", None)
|
||||
assert pm is not None
|
||||
return pm
|
||||
|
||||
|
||||
def _check_token(request: Request, auth: str | None) -> None:
|
||||
if auth is None or not auth.startswith("Bearer "):
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
token = auth.split(" ", 1)[1]
|
||||
jwt_secret = getattr(request.app.state, "jwt_secret", "")
|
||||
token_hash = getattr(request.app.state, "token_hash", "")
|
||||
try:
|
||||
jwt.decode(token, jwt_secret, algorithms=["HS256"])
|
||||
except jwt.ExpiredSignatureError:
|
||||
raise HTTPException(status_code=401, detail="Token expired")
|
||||
except jwt.InvalidTokenError:
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
if not hmac.compare_digest(hashlib.sha256(token.encode()).hexdigest(), token_hash):
|
||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||
|
||||
|
||||
def _reload_relays(relays: list[str]) -> None:
|
||||
def _reload_relays(request: Request, relays: list[str]) -> None:
|
||||
"""Reload the Nostr client with a new relay list."""
|
||||
assert _pm is not None
|
||||
pm = _get_pm(request)
|
||||
try:
|
||||
_pm.nostr_client.close_client_pool()
|
||||
except Exception:
|
||||
pass
|
||||
pm.nostr_client.close_client_pool()
|
||||
except (OSError, RuntimeError, ValueError) as exc:
|
||||
logger.warning("Failed to close NostrClient pool: %s", exc)
|
||||
try:
|
||||
_pm.nostr_client.relays = relays
|
||||
_pm.nostr_client.initialize_client_pool()
|
||||
except Exception:
|
||||
pass
|
||||
pm.nostr_client.relays = relays
|
||||
pm.nostr_client.initialize_client_pool()
|
||||
except (OSError, RuntimeError, ValueError) as exc:
|
||||
logger.error("Failed to initialize NostrClient with relays %s: %s", relays, exc)
|
||||
|
||||
|
||||
def start_server(fingerprint: str | None = None) -> str:
|
||||
"""Initialize global state and return the API token.
|
||||
"""Initialize global state and return a short-lived JWT token.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
fingerprint:
|
||||
Optional seed profile fingerprint to select before starting the server.
|
||||
"""
|
||||
global _pm, _token
|
||||
if fingerprint is None:
|
||||
_pm = PasswordManager()
|
||||
pm = PasswordManager()
|
||||
else:
|
||||
_pm = PasswordManager(fingerprint=fingerprint)
|
||||
_token = secrets.token_urlsafe(16)
|
||||
print(f"API token: {_token}")
|
||||
pm = PasswordManager(fingerprint=fingerprint)
|
||||
app.state.pm = pm
|
||||
app.state.jwt_secret = secrets.token_urlsafe(32)
|
||||
payload = {"exp": datetime.now(timezone.utc) + timedelta(minutes=5)}
|
||||
raw_token = jwt.encode(payload, app.state.jwt_secret, algorithm="HS256")
|
||||
app.state.token_hash = hashlib.sha256(raw_token.encode()).hexdigest()
|
||||
if not getattr(app.state, "limiter", None):
|
||||
app.state.limiter = limiter
|
||||
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
|
||||
app.add_middleware(SlowAPIMiddleware)
|
||||
origins = [
|
||||
o.strip()
|
||||
for o in os.getenv("SEEDPASS_CORS_ORIGINS", "").split(",")
|
||||
@@ -71,14 +110,35 @@ def start_server(fingerprint: str | None = None) -> str:
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
return _token
|
||||
return raw_token
|
||||
|
||||
|
||||
def _require_password(request: Request, password: str | None) -> None:
|
||||
pm = _get_pm(request)
|
||||
if password is None or not pm.verify_password(password):
|
||||
raise HTTPException(status_code=401, detail="Invalid password")
|
||||
|
||||
|
||||
def _validate_encryption_path(request: Request, path: Path) -> Path:
|
||||
"""Validate and normalize ``path`` within the active fingerprint directory.
|
||||
|
||||
Returns the resolved absolute path if validation succeeds.
|
||||
"""
|
||||
|
||||
pm = _get_pm(request)
|
||||
try:
|
||||
return pm.encryption_manager.resolve_relative_path(path)
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
@app.get("/api/v1/entry")
|
||||
def search_entry(query: str, authorization: str | None = Header(None)) -> List[Any]:
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
results = _pm.entry_manager.search_entries(query)
|
||||
async def search_entry(
|
||||
request: Request, query: str, authorization: str | None = Header(None)
|
||||
) -> List[Any]:
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
results = await run_in_threadpool(pm.entry_manager.search_entries, query)
|
||||
return [
|
||||
{
|
||||
"id": idx,
|
||||
@@ -93,17 +153,24 @@ def search_entry(query: str, authorization: str | None = Header(None)) -> List[A
|
||||
|
||||
|
||||
@app.get("/api/v1/entry/{entry_id}")
|
||||
def get_entry(entry_id: int, authorization: str | None = Header(None)) -> Any:
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
entry = _pm.entry_manager.retrieve_entry(entry_id)
|
||||
async def get_entry(
|
||||
request: Request,
|
||||
entry_id: int,
|
||||
authorization: str | None = Header(None),
|
||||
password: str | None = Header(None, alias="X-SeedPass-Password"),
|
||||
) -> Any:
|
||||
_check_token(request, authorization)
|
||||
_require_password(request, password)
|
||||
pm = _get_pm(request)
|
||||
entry = await run_in_threadpool(pm.entry_manager.retrieve_entry, entry_id)
|
||||
if entry is None:
|
||||
raise HTTPException(status_code=404, detail="Not found")
|
||||
return entry
|
||||
|
||||
|
||||
@app.post("/api/v1/entry")
|
||||
def create_entry(
|
||||
async def create_entry(
|
||||
request: Request,
|
||||
entry: dict,
|
||||
authorization: str | None = Header(None),
|
||||
) -> dict[str, Any]:
|
||||
@@ -113,8 +180,8 @@ def create_entry(
|
||||
on, the corresponding entry type is created. When omitted or set to
|
||||
``password`` the behaviour matches the legacy password-entry API.
|
||||
"""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
|
||||
etype = (entry.get("type") or entry.get("kind") or "password").lower()
|
||||
|
||||
@@ -130,7 +197,9 @@ def create_entry(
|
||||
"min_special",
|
||||
]
|
||||
kwargs = {k: entry.get(k) for k in policy_keys if entry.get(k) is not None}
|
||||
index = _pm.entry_manager.add_entry(
|
||||
|
||||
index = await run_in_threadpool(
|
||||
pm.entry_manager.add_entry,
|
||||
entry.get("label"),
|
||||
int(entry.get("length", 12)),
|
||||
entry.get("username"),
|
||||
@@ -140,10 +209,12 @@ def create_entry(
|
||||
return {"id": index}
|
||||
|
||||
if etype == "totp":
|
||||
index = _pm.entry_manager.get_next_index()
|
||||
uri = _pm.entry_manager.add_totp(
|
||||
index = await run_in_threadpool(pm.entry_manager.get_next_index)
|
||||
|
||||
uri = await run_in_threadpool(
|
||||
pm.entry_manager.add_totp,
|
||||
entry.get("label"),
|
||||
_pm.parent_seed,
|
||||
pm.parent_seed,
|
||||
secret=entry.get("secret"),
|
||||
index=entry.get("index"),
|
||||
period=int(entry.get("period", 30)),
|
||||
@@ -154,9 +225,10 @@ def create_entry(
|
||||
return {"id": index, "uri": uri}
|
||||
|
||||
if etype == "ssh":
|
||||
index = _pm.entry_manager.add_ssh_key(
|
||||
index = await run_in_threadpool(
|
||||
pm.entry_manager.add_ssh_key,
|
||||
entry.get("label"),
|
||||
_pm.parent_seed,
|
||||
pm.parent_seed,
|
||||
index=entry.get("index"),
|
||||
notes=entry.get("notes", ""),
|
||||
archived=entry.get("archived", False),
|
||||
@@ -164,9 +236,10 @@ def create_entry(
|
||||
return {"id": index}
|
||||
|
||||
if etype == "pgp":
|
||||
index = _pm.entry_manager.add_pgp_key(
|
||||
index = await run_in_threadpool(
|
||||
pm.entry_manager.add_pgp_key,
|
||||
entry.get("label"),
|
||||
_pm.parent_seed,
|
||||
pm.parent_seed,
|
||||
index=entry.get("index"),
|
||||
key_type=entry.get("key_type", "ed25519"),
|
||||
user_id=entry.get("user_id", ""),
|
||||
@@ -176,9 +249,10 @@ def create_entry(
|
||||
return {"id": index}
|
||||
|
||||
if etype == "nostr":
|
||||
index = _pm.entry_manager.add_nostr_key(
|
||||
index = await run_in_threadpool(
|
||||
pm.entry_manager.add_nostr_key,
|
||||
entry.get("label"),
|
||||
_pm.parent_seed,
|
||||
pm.parent_seed,
|
||||
index=entry.get("index"),
|
||||
notes=entry.get("notes", ""),
|
||||
archived=entry.get("archived", False),
|
||||
@@ -186,7 +260,8 @@ def create_entry(
|
||||
return {"id": index}
|
||||
|
||||
if etype == "key_value":
|
||||
index = _pm.entry_manager.add_key_value(
|
||||
index = await run_in_threadpool(
|
||||
pm.entry_manager.add_key_value,
|
||||
entry.get("label"),
|
||||
entry.get("key"),
|
||||
entry.get("value"),
|
||||
@@ -196,13 +271,14 @@ def create_entry(
|
||||
|
||||
if etype in {"seed", "managed_account"}:
|
||||
func = (
|
||||
_pm.entry_manager.add_seed
|
||||
pm.entry_manager.add_seed
|
||||
if etype == "seed"
|
||||
else _pm.entry_manager.add_managed_account
|
||||
else pm.entry_manager.add_managed_account
|
||||
)
|
||||
index = func(
|
||||
index = await run_in_threadpool(
|
||||
func,
|
||||
entry.get("label"),
|
||||
_pm.parent_seed,
|
||||
pm.parent_seed,
|
||||
index=entry.get("index"),
|
||||
notes=entry.get("notes", ""),
|
||||
)
|
||||
@@ -213,6 +289,7 @@ def create_entry(
|
||||
|
||||
@app.put("/api/v1/entry/{entry_id}")
|
||||
def update_entry(
|
||||
request: Request,
|
||||
entry_id: int,
|
||||
entry: dict,
|
||||
authorization: str | None = Header(None),
|
||||
@@ -222,10 +299,10 @@ def update_entry(
|
||||
Additional fields like ``period``, ``digits`` and ``value`` are forwarded for
|
||||
specialized entry types (e.g. TOTP or key/value entries).
|
||||
"""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
try:
|
||||
_pm.entry_manager.modify_entry(
|
||||
pm.entry_manager.modify_entry(
|
||||
entry_id,
|
||||
username=entry.get("username"),
|
||||
url=entry.get("url"),
|
||||
@@ -243,31 +320,34 @@ def update_entry(
|
||||
|
||||
@app.post("/api/v1/entry/{entry_id}/archive")
|
||||
def archive_entry(
|
||||
entry_id: int, authorization: str | None = Header(None)
|
||||
request: Request, entry_id: int, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Archive an entry."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_pm.entry_manager.archive_entry(entry_id)
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
pm.entry_manager.archive_entry(entry_id)
|
||||
return {"status": "archived"}
|
||||
|
||||
|
||||
@app.post("/api/v1/entry/{entry_id}/unarchive")
|
||||
def unarchive_entry(
|
||||
entry_id: int, authorization: str | None = Header(None)
|
||||
request: Request, entry_id: int, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Restore an archived entry."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_pm.entry_manager.restore_entry(entry_id)
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
pm.entry_manager.restore_entry(entry_id)
|
||||
return {"status": "active"}
|
||||
|
||||
|
||||
@app.get("/api/v1/config/{key}")
|
||||
def get_config(key: str, authorization: str | None = Header(None)) -> Any:
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
value = _pm.config_manager.load_config(require_pin=False).get(key)
|
||||
def get_config(
|
||||
request: Request, key: str, authorization: str | None = Header(None)
|
||||
) -> Any:
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
value = pm.config_manager.load_config(require_pin=False).get(key)
|
||||
|
||||
if value is None:
|
||||
raise HTTPException(status_code=404, detail="Not found")
|
||||
return {"key": key, "value": value}
|
||||
@@ -275,12 +355,15 @@ def get_config(key: str, authorization: str | None = Header(None)) -> Any:
|
||||
|
||||
@app.put("/api/v1/config/{key}")
|
||||
def update_config(
|
||||
key: str, data: dict, authorization: str | None = Header(None)
|
||||
request: Request,
|
||||
key: str,
|
||||
data: dict,
|
||||
authorization: str | None = Header(None),
|
||||
) -> dict[str, str]:
|
||||
"""Update a configuration setting."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
cfg = _pm.config_manager
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
cfg = pm.config_manager
|
||||
mapping = {
|
||||
"relays": lambda v: cfg.set_relays(v, require_pin=False),
|
||||
"pin": cfg.set_pin,
|
||||
@@ -293,6 +376,7 @@ def update_config(
|
||||
}
|
||||
|
||||
action = mapping.get(key)
|
||||
|
||||
if action is None:
|
||||
raise HTTPException(status_code=400, detail="Unknown key")
|
||||
|
||||
@@ -305,84 +389,105 @@ def update_config(
|
||||
|
||||
@app.post("/api/v1/secret-mode")
|
||||
def set_secret_mode(
|
||||
data: dict, authorization: str | None = Header(None)
|
||||
request: Request, data: dict, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Enable/disable secret mode and set the clipboard delay."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
enabled = data.get("enabled")
|
||||
|
||||
delay = data.get("delay")
|
||||
|
||||
if enabled is None or delay is None:
|
||||
raise HTTPException(status_code=400, detail="Missing fields")
|
||||
cfg = _pm.config_manager
|
||||
cfg = pm.config_manager
|
||||
cfg.set_secret_mode_enabled(bool(enabled))
|
||||
cfg.set_clipboard_clear_delay(int(delay))
|
||||
_pm.secret_mode_enabled = bool(enabled)
|
||||
_pm.clipboard_clear_delay = int(delay)
|
||||
pm.secret_mode_enabled = bool(enabled)
|
||||
pm.clipboard_clear_delay = int(delay)
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.get("/api/v1/fingerprint")
|
||||
def list_fingerprints(authorization: str | None = Header(None)) -> List[str]:
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
return _pm.fingerprint_manager.list_fingerprints()
|
||||
def list_fingerprints(
|
||||
request: Request, authorization: str | None = Header(None)
|
||||
) -> List[str]:
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
return pm.fingerprint_manager.list_fingerprints()
|
||||
|
||||
|
||||
@app.post("/api/v1/fingerprint")
|
||||
def add_fingerprint(authorization: str | None = Header(None)) -> dict[str, str]:
|
||||
def add_fingerprint(
|
||||
request: Request, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Create a new seed profile."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_pm.add_new_fingerprint()
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
pm.add_new_fingerprint()
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.delete("/api/v1/fingerprint/{fingerprint}")
|
||||
def remove_fingerprint(
|
||||
fingerprint: str, authorization: str | None = Header(None)
|
||||
request: Request, fingerprint: str, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Remove a seed profile."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_pm.fingerprint_manager.remove_fingerprint(fingerprint)
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
pm.fingerprint_manager.remove_fingerprint(fingerprint)
|
||||
return {"status": "deleted"}
|
||||
|
||||
|
||||
@app.post("/api/v1/fingerprint/select")
|
||||
def select_fingerprint(
|
||||
data: dict, authorization: str | None = Header(None)
|
||||
request: Request, data: dict, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Switch the active seed profile."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
fp = data.get("fingerprint")
|
||||
|
||||
if not fp:
|
||||
raise HTTPException(status_code=400, detail="Missing fingerprint")
|
||||
_pm.select_fingerprint(fp)
|
||||
pm.select_fingerprint(fp)
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.get("/api/v1/totp/export")
|
||||
def export_totp(authorization: str | None = Header(None)) -> dict:
|
||||
def export_totp(
|
||||
request: Request,
|
||||
authorization: str | None = Header(None),
|
||||
password: str | None = Header(None, alias="X-SeedPass-Password"),
|
||||
) -> dict:
|
||||
"""Return all stored TOTP entries in JSON format."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
return _pm.entry_manager.export_totp_entries(_pm.parent_seed)
|
||||
_check_token(request, authorization)
|
||||
_require_password(request, password)
|
||||
pm = _get_pm(request)
|
||||
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
|
||||
return pm.entry_manager.export_totp_entries(key)
|
||||
|
||||
|
||||
@app.get("/api/v1/totp")
|
||||
def get_totp_codes(authorization: str | None = Header(None)) -> dict:
|
||||
def get_totp_codes(
|
||||
request: Request,
|
||||
authorization: str | None = Header(None),
|
||||
password: str | None = Header(None, alias="X-SeedPass-Password"),
|
||||
) -> dict:
|
||||
"""Return active TOTP codes with remaining seconds."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
entries = _pm.entry_manager.list_entries(
|
||||
filter_kind=EntryType.TOTP.value, include_archived=False
|
||||
_check_token(request, authorization)
|
||||
_require_password(request, password)
|
||||
pm = _get_pm(request)
|
||||
entries = pm.entry_manager.list_entries(
|
||||
filter_kinds=[EntryType.TOTP.value], include_archived=False
|
||||
)
|
||||
codes = []
|
||||
for idx, label, _u, _url, _arch in entries:
|
||||
code = _pm.entry_manager.get_totp_code(idx, _pm.parent_seed)
|
||||
rem = _pm.entry_manager.get_totp_time_remaining(idx)
|
||||
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(pm, "parent_seed", None)
|
||||
code = pm.entry_manager.get_totp_code(idx, key)
|
||||
|
||||
rem = pm.entry_manager.get_totp_time_remaining(idx)
|
||||
|
||||
codes.append(
|
||||
{"id": idx, "label": label, "code": code, "seconds_remaining": rem}
|
||||
)
|
||||
@@ -390,132 +495,138 @@ def get_totp_codes(authorization: str | None = Header(None)) -> dict:
|
||||
|
||||
|
||||
@app.get("/api/v1/stats")
|
||||
def get_profile_stats(authorization: str | None = Header(None)) -> dict:
|
||||
def get_profile_stats(
|
||||
request: Request, authorization: str | None = Header(None)
|
||||
) -> dict:
|
||||
"""Return statistics about the active seed profile."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
return _pm.get_profile_stats()
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
return pm.get_profile_stats()
|
||||
|
||||
|
||||
@app.get("/api/v1/notifications")
|
||||
def get_notifications(authorization: str | None = Header(None)) -> List[dict]:
|
||||
def get_notifications(
|
||||
request: Request, authorization: str | None = Header(None)
|
||||
) -> List[dict]:
|
||||
"""Return and clear queued notifications."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
notes = []
|
||||
while True:
|
||||
try:
|
||||
note = _pm.notifications.get_nowait()
|
||||
note = pm.notifications.get_nowait()
|
||||
except queue.Empty:
|
||||
break
|
||||
notes.append({"level": note.level, "message": note.message})
|
||||
return notes
|
||||
|
||||
|
||||
@app.get("/api/v1/parent-seed")
|
||||
def get_parent_seed(
|
||||
authorization: str | None = Header(None), file: str | None = None
|
||||
) -> dict:
|
||||
"""Return the parent seed or save it as an encrypted backup."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
if file:
|
||||
path = Path(file)
|
||||
_pm.encryption_manager.encrypt_and_save_file(
|
||||
_pm.parent_seed.encode("utf-8"), path
|
||||
)
|
||||
return {"status": "saved", "path": str(path)}
|
||||
return {"seed": _pm.parent_seed}
|
||||
|
||||
|
||||
@app.get("/api/v1/nostr/pubkey")
|
||||
def get_nostr_pubkey(authorization: str | None = Header(None)) -> Any:
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
return {"npub": _pm.nostr_client.key_manager.get_npub()}
|
||||
def get_nostr_pubkey(request: Request, authorization: str | None = Header(None)) -> Any:
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
return {"npub": pm.nostr_client.key_manager.get_npub()}
|
||||
|
||||
|
||||
@app.get("/api/v1/relays")
|
||||
def list_relays(authorization: str | None = Header(None)) -> dict:
|
||||
def list_relays(request: Request, authorization: str | None = Header(None)) -> dict:
|
||||
"""Return the configured Nostr relays."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
cfg = _pm.config_manager.load_config(require_pin=False)
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
cfg = pm.config_manager.load_config(require_pin=False)
|
||||
return {"relays": cfg.get("relays", [])}
|
||||
|
||||
|
||||
@app.post("/api/v1/relays")
|
||||
def add_relay(data: dict, authorization: str | None = Header(None)) -> dict[str, str]:
|
||||
def add_relay(
|
||||
request: Request, data: dict, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Add a relay URL to the configuration."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
url = data.get("url")
|
||||
|
||||
if not url:
|
||||
raise HTTPException(status_code=400, detail="Missing url")
|
||||
cfg = _pm.config_manager.load_config(require_pin=False)
|
||||
cfg = pm.config_manager.load_config(require_pin=False)
|
||||
relays = cfg.get("relays", [])
|
||||
|
||||
if url in relays:
|
||||
raise HTTPException(status_code=400, detail="Relay already present")
|
||||
relays.append(url)
|
||||
_pm.config_manager.set_relays(relays, require_pin=False)
|
||||
_reload_relays(relays)
|
||||
pm.config_manager.set_relays(relays, require_pin=False)
|
||||
_reload_relays(request, relays)
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.delete("/api/v1/relays/{idx}")
|
||||
def remove_relay(idx: int, authorization: str | None = Header(None)) -> dict[str, str]:
|
||||
def remove_relay(
|
||||
request: Request, idx: int, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Remove a relay by its index (1-based)."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
cfg = _pm.config_manager.load_config(require_pin=False)
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
cfg = pm.config_manager.load_config(require_pin=False)
|
||||
relays = cfg.get("relays", [])
|
||||
|
||||
if not (1 <= idx <= len(relays)):
|
||||
raise HTTPException(status_code=400, detail="Invalid index")
|
||||
if len(relays) == 1:
|
||||
raise HTTPException(status_code=400, detail="At least one relay required")
|
||||
relays.pop(idx - 1)
|
||||
_pm.config_manager.set_relays(relays, require_pin=False)
|
||||
_reload_relays(relays)
|
||||
pm.config_manager.set_relays(relays, require_pin=False)
|
||||
_reload_relays(request, relays)
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.post("/api/v1/relays/reset")
|
||||
def reset_relays(authorization: str | None = Header(None)) -> dict[str, str]:
|
||||
def reset_relays(
|
||||
request: Request, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Reset relay list to defaults."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
from nostr.client import DEFAULT_RELAYS
|
||||
|
||||
relays = list(DEFAULT_RELAYS)
|
||||
_pm.config_manager.set_relays(relays, require_pin=False)
|
||||
_reload_relays(relays)
|
||||
pm.config_manager.set_relays(relays, require_pin=False)
|
||||
_reload_relays(request, relays)
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.post("/api/v1/checksum/verify")
|
||||
def verify_checksum(authorization: str | None = Header(None)) -> dict[str, str]:
|
||||
def verify_checksum(
|
||||
request: Request, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Verify the SeedPass script checksum."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_pm.handle_verify_checksum()
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
pm.handle_verify_checksum()
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.post("/api/v1/checksum/update")
|
||||
def update_checksum(authorization: str | None = Header(None)) -> dict[str, str]:
|
||||
def update_checksum(
|
||||
request: Request, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Regenerate the script checksum file."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_pm.handle_update_script_checksum()
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
pm.handle_update_script_checksum()
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.post("/api/v1/vault/export")
|
||||
def export_vault(authorization: str | None = Header(None)):
|
||||
def export_vault(
|
||||
request: Request,
|
||||
authorization: str | None = Header(None),
|
||||
password: str | None = Header(None, alias="X-SeedPass-Password"),
|
||||
):
|
||||
"""Export the vault and return the encrypted file."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
path = _pm.handle_export_database()
|
||||
_check_token(request, authorization)
|
||||
_require_password(request, password)
|
||||
pm = _get_pm(request)
|
||||
path = pm.handle_export_database()
|
||||
if path is None:
|
||||
raise HTTPException(status_code=500, detail="Export failed")
|
||||
data = Path(path).read_bytes()
|
||||
@@ -527,13 +638,15 @@ async def import_vault(
|
||||
request: Request, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Import a vault backup from a file upload or a server path."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
|
||||
ctype = request.headers.get("content-type", "")
|
||||
|
||||
if ctype.startswith("multipart/form-data"):
|
||||
form = await request.form()
|
||||
file = form.get("file")
|
||||
|
||||
if file is None:
|
||||
raise HTTPException(status_code=400, detail="Missing file")
|
||||
data = await file.read()
|
||||
@@ -541,54 +654,75 @@ async def import_vault(
|
||||
tmp.write(data)
|
||||
tmp_path = Path(tmp.name)
|
||||
try:
|
||||
_pm.handle_import_database(tmp_path)
|
||||
pm.handle_import_database(tmp_path)
|
||||
finally:
|
||||
os.unlink(tmp_path)
|
||||
else:
|
||||
body = await request.json()
|
||||
path = body.get("path")
|
||||
if not path:
|
||||
path_str = body.get("path")
|
||||
|
||||
if not path_str:
|
||||
raise HTTPException(status_code=400, detail="Missing file or path")
|
||||
_pm.handle_import_database(Path(path))
|
||||
_pm.sync_vault()
|
||||
|
||||
path = _validate_encryption_path(request, Path(path_str))
|
||||
if not str(path).endswith(".json.enc"):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Selected file must be a '.json.enc' backup",
|
||||
)
|
||||
|
||||
pm.handle_import_database(path)
|
||||
pm.sync_vault()
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.post("/api/v1/vault/backup-parent-seed")
|
||||
def backup_parent_seed(
|
||||
data: dict | None = None, authorization: str | None = Header(None)
|
||||
request: Request,
|
||||
data: dict,
|
||||
authorization: str | None = Header(None),
|
||||
password: str | None = Header(None, alias="X-SeedPass-Password"),
|
||||
) -> dict[str, str]:
|
||||
"""Backup and reveal the parent seed."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
path = None
|
||||
if data is not None:
|
||||
p = data.get("path")
|
||||
if p:
|
||||
path = Path(p)
|
||||
_pm.handle_backup_reveal_parent_seed(path)
|
||||
return {"status": "ok"}
|
||||
"""Create an encrypted backup of the parent seed after confirmation."""
|
||||
_check_token(request, authorization)
|
||||
_require_password(request, password)
|
||||
pm = _get_pm(request)
|
||||
|
||||
if not data.get("confirm"):
|
||||
|
||||
raise HTTPException(status_code=400, detail="Confirmation required")
|
||||
|
||||
path_str = data.get("path")
|
||||
|
||||
if not path_str:
|
||||
raise HTTPException(status_code=400, detail="Missing path")
|
||||
path = Path(path_str)
|
||||
_validate_encryption_path(request, path)
|
||||
pm.encryption_manager.encrypt_and_save_file(pm.parent_seed.encode("utf-8"), path)
|
||||
return {"status": "saved", "path": str(path)}
|
||||
|
||||
|
||||
@app.post("/api/v1/change-password")
|
||||
def change_password(
|
||||
data: dict, authorization: str | None = Header(None)
|
||||
request: Request, data: dict, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Change the master password for the active profile."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_pm.change_password(data.get("old", ""), data.get("new", ""))
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
pm.change_password(data.get("old", ""), data.get("new", ""))
|
||||
|
||||
return {"status": "ok"}
|
||||
|
||||
|
||||
@app.post("/api/v1/password")
|
||||
def generate_password(
|
||||
data: dict, authorization: str | None = Header(None)
|
||||
request: Request, data: dict, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Generate a password using optional policy overrides."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
length = int(data.get("length", 12))
|
||||
|
||||
policy_keys = [
|
||||
"include_special_chars",
|
||||
"allowed_special_chars",
|
||||
@@ -600,22 +734,28 @@ def generate_password(
|
||||
"min_special",
|
||||
]
|
||||
kwargs = {k: data.get(k) for k in policy_keys if data.get(k) is not None}
|
||||
util = UtilityService(_pm)
|
||||
|
||||
util = UtilityService(pm)
|
||||
password = util.generate_password(length, **kwargs)
|
||||
return {"password": password}
|
||||
|
||||
|
||||
@app.post("/api/v1/vault/lock")
|
||||
def lock_vault(authorization: str | None = Header(None)) -> dict[str, str]:
|
||||
def lock_vault(
|
||||
request: Request, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
"""Lock the vault and clear sensitive data from memory."""
|
||||
_check_token(authorization)
|
||||
assert _pm is not None
|
||||
_pm.lock_vault()
|
||||
_check_token(request, authorization)
|
||||
pm = _get_pm(request)
|
||||
pm.lock_vault()
|
||||
return {"status": "locked"}
|
||||
|
||||
|
||||
@app.post("/api/v1/shutdown")
|
||||
async def shutdown_server(authorization: str | None = Header(None)) -> dict[str, str]:
|
||||
_check_token(authorization)
|
||||
async def shutdown_server(
|
||||
request: Request, authorization: str | None = Header(None)
|
||||
) -> dict[str, str]:
|
||||
_check_token(request, authorization)
|
||||
asyncio.get_event_loop().call_soon(sys.exit, 0)
|
||||
|
||||
return {"status": "shutting down"}
|
||||
|
@@ -1,878 +0,0 @@
|
||||
from pathlib import Path
|
||||
from typing import Optional, List
|
||||
import json
|
||||
|
||||
import typer
|
||||
import sys
|
||||
|
||||
from seedpass.core.manager import PasswordManager
|
||||
from seedpass.core.entry_types import EntryType
|
||||
from seedpass.core.api import (
|
||||
VaultService,
|
||||
ProfileService,
|
||||
SyncService,
|
||||
EntryService,
|
||||
ConfigService,
|
||||
UtilityService,
|
||||
NostrService,
|
||||
ChangePasswordRequest,
|
||||
UnlockRequest,
|
||||
BackupParentSeedRequest,
|
||||
ProfileSwitchRequest,
|
||||
ProfileRemoveRequest,
|
||||
)
|
||||
import uvicorn
|
||||
from . import api as api_module
|
||||
|
||||
import importlib
|
||||
import importlib.util
|
||||
import subprocess
|
||||
|
||||
app = typer.Typer(
|
||||
help="SeedPass command line interface",
|
||||
invoke_without_command=True,
|
||||
)
|
||||
|
||||
# Global option shared across all commands
|
||||
fingerprint_option = typer.Option(
|
||||
None,
|
||||
"--fingerprint",
|
||||
"-f",
|
||||
help="Specify which seed profile to use",
|
||||
)
|
||||
|
||||
# Sub command groups
|
||||
entry_app = typer.Typer(help="Manage individual entries")
|
||||
vault_app = typer.Typer(help="Manage the entire vault")
|
||||
nostr_app = typer.Typer(help="Interact with Nostr relays")
|
||||
config_app = typer.Typer(help="Get or set configuration values")
|
||||
fingerprint_app = typer.Typer(help="Manage seed profiles")
|
||||
util_app = typer.Typer(help="Utility commands")
|
||||
api_app = typer.Typer(help="Run the API server")
|
||||
|
||||
app.add_typer(entry_app, name="entry")
|
||||
app.add_typer(vault_app, name="vault")
|
||||
app.add_typer(nostr_app, name="nostr")
|
||||
app.add_typer(config_app, name="config")
|
||||
app.add_typer(fingerprint_app, name="fingerprint")
|
||||
app.add_typer(util_app, name="util")
|
||||
app.add_typer(api_app, name="api")
|
||||
|
||||
|
||||
def _get_pm(ctx: typer.Context) -> PasswordManager:
|
||||
"""Return a PasswordManager optionally selecting a fingerprint."""
|
||||
fp = ctx.obj.get("fingerprint")
|
||||
if fp is None:
|
||||
pm = PasswordManager()
|
||||
else:
|
||||
pm = PasswordManager(fingerprint=fp)
|
||||
return pm
|
||||
|
||||
|
||||
def _get_services(
|
||||
ctx: typer.Context,
|
||||
) -> tuple[VaultService, ProfileService, SyncService]:
|
||||
"""Return service layer instances for the current context."""
|
||||
|
||||
pm = _get_pm(ctx)
|
||||
return VaultService(pm), ProfileService(pm), SyncService(pm)
|
||||
|
||||
|
||||
def _get_entry_service(ctx: typer.Context) -> EntryService:
|
||||
pm = _get_pm(ctx)
|
||||
return EntryService(pm)
|
||||
|
||||
|
||||
def _get_config_service(ctx: typer.Context) -> ConfigService:
|
||||
pm = _get_pm(ctx)
|
||||
return ConfigService(pm)
|
||||
|
||||
|
||||
def _get_util_service(ctx: typer.Context) -> UtilityService:
|
||||
pm = _get_pm(ctx)
|
||||
return UtilityService(pm)
|
||||
|
||||
|
||||
def _get_nostr_service(ctx: typer.Context) -> NostrService:
|
||||
pm = _get_pm(ctx)
|
||||
return NostrService(pm)
|
||||
|
||||
|
||||
def _gui_backend_available() -> bool:
|
||||
"""Return True if a platform-specific BeeWare backend is installed."""
|
||||
for pkg in ("toga_gtk", "toga_winforms", "toga_cocoa"):
|
||||
if importlib.util.find_spec(pkg) is not None:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
@app.callback(invoke_without_command=True)
|
||||
def main(ctx: typer.Context, fingerprint: Optional[str] = fingerprint_option) -> None:
|
||||
"""SeedPass CLI entry point.
|
||||
|
||||
When called without a subcommand this launches the interactive TUI.
|
||||
"""
|
||||
ctx.obj = {"fingerprint": fingerprint}
|
||||
if ctx.invoked_subcommand is None:
|
||||
tui = importlib.import_module("main")
|
||||
raise typer.Exit(tui.main(fingerprint=fingerprint))
|
||||
|
||||
|
||||
@entry_app.command("list")
|
||||
def entry_list(
|
||||
ctx: typer.Context,
|
||||
sort: str = typer.Option(
|
||||
"index", "--sort", help="Sort by 'index', 'label', or 'updated'"
|
||||
),
|
||||
kind: Optional[str] = typer.Option(None, "--kind", help="Filter by entry type"),
|
||||
archived: bool = typer.Option(False, "--archived", help="Include archived"),
|
||||
) -> None:
|
||||
"""List entries in the vault."""
|
||||
service = _get_entry_service(ctx)
|
||||
entries = service.list_entries(
|
||||
sort_by=sort, filter_kind=kind, include_archived=archived
|
||||
)
|
||||
for idx, label, username, url, is_archived in entries:
|
||||
line = f"{idx}: {label}"
|
||||
if username:
|
||||
line += f" ({username})"
|
||||
if url:
|
||||
line += f" {url}"
|
||||
if is_archived:
|
||||
line += " [archived]"
|
||||
typer.echo(line)
|
||||
|
||||
|
||||
@entry_app.command("search")
|
||||
def entry_search(
|
||||
ctx: typer.Context,
|
||||
query: str,
|
||||
kind: List[str] = typer.Option(
|
||||
None,
|
||||
"--kind",
|
||||
"-k",
|
||||
help="Filter by entry kinds (can be repeated)",
|
||||
),
|
||||
) -> None:
|
||||
"""Search entries."""
|
||||
service = _get_entry_service(ctx)
|
||||
kinds = list(kind) if kind else None
|
||||
results = service.search_entries(query, kinds=kinds)
|
||||
if not results:
|
||||
typer.echo("No matching entries found")
|
||||
return
|
||||
for idx, label, username, url, _arch, etype in results:
|
||||
line = f"{idx}: {etype.value.replace('_', ' ').title()} - {label}"
|
||||
if username:
|
||||
line += f" ({username})"
|
||||
if url:
|
||||
line += f" {url}"
|
||||
typer.echo(line)
|
||||
|
||||
|
||||
@entry_app.command("get")
|
||||
def entry_get(ctx: typer.Context, query: str) -> None:
|
||||
"""Retrieve a single entry's secret."""
|
||||
service = _get_entry_service(ctx)
|
||||
matches = service.search_entries(query)
|
||||
if len(matches) == 0:
|
||||
typer.echo("No matching entries found")
|
||||
raise typer.Exit(code=1)
|
||||
if len(matches) > 1:
|
||||
typer.echo("Matches:")
|
||||
for idx, label, username, _url, _arch, etype in matches:
|
||||
name = f"{idx}: {etype.value.replace('_', ' ').title()} - {label}"
|
||||
if username:
|
||||
name += f" ({username})"
|
||||
typer.echo(name)
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
index = matches[0][0]
|
||||
entry = service.retrieve_entry(index)
|
||||
etype = entry.get("type", entry.get("kind"))
|
||||
if etype == EntryType.PASSWORD.value:
|
||||
length = int(entry.get("length", 12))
|
||||
password = service.generate_password(length, index)
|
||||
typer.echo(password)
|
||||
elif etype == EntryType.TOTP.value:
|
||||
code = service.get_totp_code(index)
|
||||
typer.echo(code)
|
||||
else:
|
||||
typer.echo("Unsupported entry type")
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
|
||||
@entry_app.command("add")
def entry_add(
    ctx: typer.Context,
    label: str,
    length: int = typer.Option(12, "--length"),
    username: Optional[str] = typer.Option(None, "--username"),
    url: Optional[str] = typer.Option(None, "--url"),
    no_special: bool = typer.Option(
        False, "--no-special", help="Exclude special characters", is_flag=True
    ),
    allowed_special_chars: Optional[str] = typer.Option(
        None, "--allowed-special-chars", help="Explicit set of special characters"
    ),
    special_mode: Optional[str] = typer.Option(
        None,
        "--special-mode",
        help="Special character mode",
    ),
    exclude_ambiguous: bool = typer.Option(
        False,
        "--exclude-ambiguous",
        help="Exclude ambiguous characters",
        is_flag=True,
    ),
    min_uppercase: Optional[int] = typer.Option(None, "--min-uppercase"),
    min_lowercase: Optional[int] = typer.Option(None, "--min-lowercase"),
    min_digits: Optional[int] = typer.Option(None, "--min-digits"),
    min_special: Optional[int] = typer.Option(None, "--min-special"),
) -> None:
    """Add a new password entry and output its index."""
    service = _get_entry_service(ctx)

    # Forward only the policy overrides the user actually supplied, so the
    # service's own defaults apply for everything else.
    kwargs = {}
    if no_special:
        kwargs["include_special_chars"] = False
    if exclude_ambiguous:
        kwargs["exclude_ambiguous"] = True
    optional_overrides = {
        "allowed_special_chars": allowed_special_chars,
        "special_mode": special_mode,
        "min_uppercase": min_uppercase,
        "min_lowercase": min_lowercase,
        "min_digits": min_digits,
        "min_special": min_special,
    }
    kwargs.update({k: v for k, v in optional_overrides.items() if v is not None})

    new_index = service.add_entry(label, length, username, url, **kwargs)
    typer.echo(str(new_index))
|
||||
|
||||
|
||||
@entry_app.command("add-totp")
def entry_add_totp(
    ctx: typer.Context,
    label: str,
    index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
    secret: Optional[str] = typer.Option(None, "--secret", help="Import secret"),
    period: int = typer.Option(30, "--period", help="TOTP period in seconds"),
    digits: int = typer.Option(6, "--digits", help="Number of TOTP digits"),
) -> None:
    """Add a TOTP entry and output the otpauth URI."""
    service = _get_entry_service(ctx)
    otpauth_uri = service.add_totp(
        label, index=index, secret=secret, period=period, digits=digits
    )
    typer.echo(otpauth_uri)
|
||||
|
||||
|
||||
@entry_app.command("add-ssh")
def entry_add_ssh(
    ctx: typer.Context,
    label: str,
    index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
    notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
    """Add an SSH key entry and output its index."""
    new_index = _get_entry_service(ctx).add_ssh_key(label, index=index, notes=notes)
    typer.echo(str(new_index))
|
||||
|
||||
|
||||
@entry_app.command("add-pgp")
def entry_add_pgp(
    ctx: typer.Context,
    label: str,
    index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
    key_type: str = typer.Option("ed25519", "--key-type", help="Key type"),
    user_id: str = typer.Option("", "--user-id", help="User ID"),
    notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
    """Add a PGP key entry and output its index."""
    new_index = _get_entry_service(ctx).add_pgp_key(
        label, index=index, key_type=key_type, user_id=user_id, notes=notes
    )
    typer.echo(str(new_index))
|
||||
|
||||
|
||||
@entry_app.command("add-nostr")
def entry_add_nostr(
    ctx: typer.Context,
    label: str,
    index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
    notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
    """Add a Nostr key entry and output its index."""
    new_index = _get_entry_service(ctx).add_nostr_key(label, index=index, notes=notes)
    typer.echo(str(new_index))
|
||||
|
||||
|
||||
@entry_app.command("add-seed")
def entry_add_seed(
    ctx: typer.Context,
    label: str,
    index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
    words: int = typer.Option(24, "--words", help="Word count"),
    notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
    """Add a derived seed phrase entry and output its index."""
    new_index = _get_entry_service(ctx).add_seed(
        label, index=index, words=words, notes=notes
    )
    typer.echo(str(new_index))
|
||||
|
||||
|
||||
@entry_app.command("add-key-value")
def entry_add_key_value(
    ctx: typer.Context,
    label: str,
    key: str = typer.Option(..., "--key", help="Key name"),
    value: str = typer.Option(..., "--value", help="Stored value"),
    notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
    """Add a key/value entry and output its index."""
    new_index = _get_entry_service(ctx).add_key_value(label, key, value, notes=notes)
    typer.echo(str(new_index))
|
||||
|
||||
|
||||
@entry_app.command("add-managed-account")
def entry_add_managed_account(
    ctx: typer.Context,
    label: str,
    index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
    notes: str = typer.Option("", "--notes", help="Entry notes"),
) -> None:
    """Add a managed account seed entry and output its index."""
    new_index = _get_entry_service(ctx).add_managed_account(
        label, index=index, notes=notes
    )
    typer.echo(str(new_index))
|
||||
|
||||
|
||||
@entry_app.command("modify")
def entry_modify(
    ctx: typer.Context,
    entry_id: int,
    label: Optional[str] = typer.Option(None, "--label"),
    username: Optional[str] = typer.Option(None, "--username"),
    url: Optional[str] = typer.Option(None, "--url"),
    notes: Optional[str] = typer.Option(None, "--notes"),
    period: Optional[int] = typer.Option(
        None, "--period", help="TOTP period in seconds"
    ),
    digits: Optional[int] = typer.Option(None, "--digits", help="TOTP digits"),
    key: Optional[str] = typer.Option(None, "--key", help="New key"),
    value: Optional[str] = typer.Option(None, "--value", help="New value"),
) -> None:
    """Modify an existing entry."""
    service = _get_entry_service(ctx)
    # All fields are forwarded; the service interprets None as "unchanged".
    updates = dict(
        username=username,
        url=url,
        notes=notes,
        label=label,
        period=period,
        digits=digits,
        key=key,
        value=value,
    )
    try:
        service.modify_entry(entry_id, **updates)
    except ValueError as e:
        typer.echo(str(e))
        sys.stdout.flush()
        raise typer.Exit(code=1)
|
||||
|
||||
|
||||
@entry_app.command("archive")
def entry_archive(ctx: typer.Context, entry_id: int) -> None:
    """Archive an entry."""
    # Echo the id back so scripts can confirm which entry was archived.
    _get_entry_service(ctx).archive_entry(entry_id)
    typer.echo(str(entry_id))
|
||||
|
||||
|
||||
@entry_app.command("unarchive")
def entry_unarchive(ctx: typer.Context, entry_id: int) -> None:
    """Restore an archived entry."""
    # Echo the id back so scripts can confirm which entry was restored.
    _get_entry_service(ctx).restore_entry(entry_id)
    typer.echo(str(entry_id))
|
||||
|
||||
|
||||
@entry_app.command("totp-codes")
def entry_totp_codes(ctx: typer.Context) -> None:
    """Display all current TOTP codes."""
    # The service handles all formatting and output itself.
    _get_entry_service(ctx).display_totp_codes()
|
||||
|
||||
|
||||
@entry_app.command("export-totp")
def entry_export_totp(
    ctx: typer.Context, file: str = typer.Option(..., help="Output file")
) -> None:
    """Export all TOTP secrets to a JSON file."""
    service = _get_entry_service(ctx)
    exported = service.export_totp_entries()
    # Secrets are written as plain JSON; the caller chooses a safe path.
    Path(file).write_text(json.dumps(exported, indent=2))
    typer.echo(str(file))
|
||||
|
||||
|
||||
@vault_app.command("export")
def vault_export(
    ctx: typer.Context, file: str = typer.Option(..., help="Output file")
) -> None:
    """Export the vault profile to an encrypted file."""
    vault_service, _profile, _sync = _get_services(ctx)
    encrypted_blob = vault_service.export_profile()
    Path(file).write_bytes(encrypted_blob)
    typer.echo(str(file))
|
||||
|
||||
|
||||
@vault_app.command("import")
def vault_import(
    ctx: typer.Context, file: str = typer.Option(..., help="Input file")
) -> None:
    """Import a vault profile from an encrypted file."""
    vault_service, _profile, _sync = _get_services(ctx)
    encrypted_blob = Path(file).read_bytes()
    vault_service.import_profile(encrypted_blob)
    typer.echo(str(file))
|
||||
|
||||
|
||||
@vault_app.command("change-password")
def vault_change_password(ctx: typer.Context) -> None:
    """Change the master password used for encryption."""
    vault_service, _profile, _sync = _get_services(ctx)
    current_pw = typer.prompt("Current password", hide_input=True)
    replacement_pw = typer.prompt(
        "New password", hide_input=True, confirmation_prompt=True
    )
    request = ChangePasswordRequest(
        old_password=current_pw, new_password=replacement_pw
    )
    try:
        vault_service.change_password(request)
    except Exception as exc:  # pragma: no cover - pass through errors
        typer.echo(f"Error: {exc}")
        raise typer.Exit(code=1)
    typer.echo("Password updated")
|
||||
|
||||
|
||||
@vault_app.command("unlock")
def vault_unlock(ctx: typer.Context) -> None:
    """Unlock the vault for the active profile."""
    vault_service, _profile, _sync = _get_services(ctx)
    password = typer.prompt("Master password", hide_input=True)
    try:
        unlock_result = vault_service.unlock(UnlockRequest(password=password))
    except Exception as exc:  # pragma: no cover - pass through errors
        typer.echo(f"Error: {exc}")
        raise typer.Exit(code=1)
    typer.echo(f"Unlocked in {unlock_result.duration:.2f}s")
|
||||
|
||||
|
||||
@vault_app.command("lock")
def vault_lock(ctx: typer.Context) -> None:
    """Lock the vault and clear sensitive data from memory."""
    locked_vault, _profile, _sync = _get_services(ctx)
    locked_vault.lock()
    typer.echo("locked")
|
||||
|
||||
|
||||
@app.command("lock")
def root_lock(ctx: typer.Context) -> None:
    """Lock the vault for the active profile."""
    # Top-level alias for "vault lock".
    services = _get_services(ctx)
    services[0].lock()
    typer.echo("locked")
|
||||
|
||||
|
||||
@vault_app.command("stats")
def vault_stats(ctx: typer.Context) -> None:
    """Display statistics about the current seed profile."""
    vault_service, _profile, _sync = _get_services(ctx)
    typer.echo(json.dumps(vault_service.stats(), indent=2))
|
||||
|
||||
|
||||
@vault_app.command("reveal-parent-seed")
def vault_reveal_parent_seed(
    ctx: typer.Context,
    file: Optional[str] = typer.Option(
        None, "--file", help="Save encrypted seed to this path"
    ),
) -> None:
    """Display the parent seed and optionally write an encrypted backup file."""
    vault_service, _profile, _sync = _get_services(ctx)
    password = typer.prompt("Master password", hide_input=True)
    # No --file means "display only"; the service receives path=None.
    backup_path = Path(file) if file else None
    vault_service.backup_parent_seed(
        BackupParentSeedRequest(path=backup_path, password=password)
    )
|
||||
|
||||
|
||||
@nostr_app.command("sync")
def nostr_sync(ctx: typer.Context) -> None:
    """Sync with configured Nostr relays."""
    _vault, _profile, sync_service = _get_services(ctx)
    result = sync_service.sync()
    if not result:
        typer.echo("Error: Failed to sync vault")
        return
    typer.echo("Event IDs:")
    typer.echo(f"- manifest: {result.manifest_id}")
    for chunk_id in result.chunk_ids:
        typer.echo(f"- chunk: {chunk_id}")
    for delta_id in result.delta_ids:
        typer.echo(f"- delta: {delta_id}")
|
||||
|
||||
|
||||
@nostr_app.command("get-pubkey")
def nostr_get_pubkey(ctx: typer.Context) -> None:
    """Display the active profile's npub."""
    typer.echo(_get_nostr_service(ctx).get_pubkey())
|
||||
|
||||
|
||||
@nostr_app.command("list-relays")
def nostr_list_relays(ctx: typer.Context) -> None:
    """Display configured Nostr relays."""
    service = _get_nostr_service(ctx)
    # 1-based numbering matches the index expected by "remove-relay".
    for position, relay_url in enumerate(service.list_relays(), start=1):
        typer.echo(f"{position}: {relay_url}")
|
||||
|
||||
|
||||
@nostr_app.command("add-relay")
def nostr_add_relay(ctx: typer.Context, url: str) -> None:
    """Add a relay URL."""
    service = _get_nostr_service(ctx)
    try:
        service.add_relay(url)
    except Exception as exc:  # pragma: no cover - pass through errors
        typer.echo(f"Error: {exc}")
        raise typer.Exit(code=1)
    typer.echo("Added")
|
||||
|
||||
|
||||
@nostr_app.command("remove-relay")
def nostr_remove_relay(ctx: typer.Context, idx: int) -> None:
    """Remove a relay by index (1-based)."""
    service = _get_nostr_service(ctx)
    try:
        service.remove_relay(idx)
    except Exception as exc:  # pragma: no cover - pass through errors
        typer.echo(f"Error: {exc}")
        raise typer.Exit(code=1)
    typer.echo("Removed")
|
||||
|
||||
|
||||
@config_app.command("get")
def config_get(ctx: typer.Context, key: str) -> None:
    """Get a configuration value."""
    # None signals an unknown/unset key rather than a stored value.
    stored = _get_config_service(ctx).get(key)
    typer.echo("Key not found" if stored is None else str(stored))
|
||||
|
||||
|
||||
@config_app.command("set")
def config_set(ctx: typer.Context, key: str, value: str) -> None:
    """Set a configuration value."""
    service = _get_config_service(ctx)

    try:
        if key == "relays":
            # Relays are entered as a comma separated list on the CLI.
            parsed = [item.strip() for item in value.split(",") if item.strip()]
        else:
            parsed = value
        service.set(key, parsed)
    except KeyError:
        typer.echo("Unknown key")
        raise typer.Exit(code=1)
    except Exception as exc:  # pragma: no cover - pass through errors
        typer.echo(f"Error: {exc}")
        raise typer.Exit(code=1)

    typer.echo("Updated")
|
||||
|
||||
|
||||
@config_app.command("toggle-secret-mode")
def config_toggle_secret_mode(ctx: typer.Context) -> None:
    """Interactively enable or disable secret mode.

    When enabled, newly generated and retrieved passwords are copied to the
    clipboard instead of printed to the screen.
    """
    service = _get_config_service(ctx)
    try:
        enabled = service.get_secret_mode_enabled()
        delay = service.get_clipboard_clear_delay()
    except Exception as exc:  # pragma: no cover - pass through errors
        typer.echo(f"Error loading settings: {exc}")
        raise typer.Exit(code=1)

    typer.echo(f"Secret mode is currently {'ON' if enabled else 'OFF'}")
    answer = typer.prompt(
        "Enable secret mode? (y/n, blank to keep)", default="", show_default=False
    )
    answer = answer.strip().lower()
    # A blank answer keeps the current setting.
    if answer in ("y", "yes"):
        enabled = True
    elif answer in ("n", "no"):
        enabled = False

    raw_delay = typer.prompt(
        f"Clipboard clear delay in seconds [{delay}]", default="", show_default=False
    ).strip()
    if raw_delay:
        try:
            delay = int(raw_delay)
        except ValueError:
            typer.echo("Invalid number")
            raise typer.Exit(code=1)
        if delay <= 0:
            typer.echo("Delay must be positive")
            raise typer.Exit(code=1)

    try:
        service.set_secret_mode(enabled, delay)
    except Exception as exc:  # pragma: no cover - pass through errors
        typer.echo(f"Error: {exc}")
        raise typer.Exit(code=1)

    typer.echo(f"Secret mode {'enabled' if enabled else 'disabled'}.")
|
||||
|
||||
|
||||
@config_app.command("toggle-offline")
def config_toggle_offline(ctx: typer.Context) -> None:
    """Enable or disable offline mode."""
    service = _get_config_service(ctx)
    try:
        enabled = service.get_offline_mode()
    except Exception as exc:  # pragma: no cover - pass through errors
        typer.echo(f"Error loading settings: {exc}")
        raise typer.Exit(code=1)

    typer.echo(f"Offline mode is currently {'ON' if enabled else 'OFF'}")
    answer = typer.prompt(
        "Enable offline mode? (y/n, blank to keep)", default="", show_default=False
    )
    answer = answer.strip().lower()
    # A blank answer keeps the current setting.
    if answer in ("y", "yes"):
        enabled = True
    elif answer in ("n", "no"):
        enabled = False

    try:
        service.set_offline_mode(enabled)
    except Exception as exc:  # pragma: no cover - pass through errors
        typer.echo(f"Error: {exc}")
        raise typer.Exit(code=1)

    typer.echo(f"Offline mode {'enabled' if enabled else 'disabled'}.")
|
||||
|
||||
|
||||
@fingerprint_app.command("list")
def fingerprint_list(ctx: typer.Context) -> None:
    """List available seed profiles."""
    _vault, profile_service, _sync = _get_services(ctx)
    for profile_fp in profile_service.list_profiles():
        typer.echo(profile_fp)
|
||||
|
||||
|
||||
@fingerprint_app.command("add")
def fingerprint_add(ctx: typer.Context) -> None:
    """Create a new seed profile."""
    # The profile service drives its own interactive flow; nothing to echo.
    _vault, profile_service, _sync = _get_services(ctx)
    profile_service.add_profile()
|
||||
|
||||
|
||||
@fingerprint_app.command("remove")
def fingerprint_remove(ctx: typer.Context, fingerprint: str) -> None:
    """Remove a seed profile."""
    _vault, profile_service, _sync = _get_services(ctx)
    request = ProfileRemoveRequest(fingerprint=fingerprint)
    profile_service.remove_profile(request)
|
||||
|
||||
|
||||
@fingerprint_app.command("switch")
def fingerprint_switch(ctx: typer.Context, fingerprint: str) -> None:
    """Switch to another seed profile."""
    _vault, profile_service, _sync = _get_services(ctx)
    password = typer.prompt("Master password", hide_input=True)
    request = ProfileSwitchRequest(fingerprint=fingerprint, password=password)
    profile_service.switch_profile(request)
|
||||
|
||||
|
||||
@util_app.command("generate-password")
def generate_password(
    ctx: typer.Context,
    length: int = 24,
    no_special: bool = typer.Option(
        False, "--no-special", help="Exclude special characters", is_flag=True
    ),
    allowed_special_chars: Optional[str] = typer.Option(
        None, "--allowed-special-chars", help="Explicit set of special characters"
    ),
    special_mode: Optional[str] = typer.Option(
        None,
        "--special-mode",
        help="Special character mode",
    ),
    exclude_ambiguous: bool = typer.Option(
        False,
        "--exclude-ambiguous",
        help="Exclude ambiguous characters",
        is_flag=True,
    ),
    min_uppercase: Optional[int] = typer.Option(None, "--min-uppercase"),
    min_lowercase: Optional[int] = typer.Option(None, "--min-lowercase"),
    min_digits: Optional[int] = typer.Option(None, "--min-digits"),
    min_special: Optional[int] = typer.Option(None, "--min-special"),
) -> None:
    """Generate a strong password."""
    service = _get_util_service(ctx)

    # Forward only the policy overrides the user actually supplied, so the
    # service's own defaults apply for everything else.
    kwargs = {}
    if no_special:
        kwargs["include_special_chars"] = False
    if exclude_ambiguous:
        kwargs["exclude_ambiguous"] = True
    optional_overrides = {
        "allowed_special_chars": allowed_special_chars,
        "special_mode": special_mode,
        "min_uppercase": min_uppercase,
        "min_lowercase": min_lowercase,
        "min_digits": min_digits,
        "min_special": min_special,
    }
    kwargs.update({k: v for k, v in optional_overrides.items() if v is not None})

    typer.echo(service.generate_password(length, **kwargs))
|
||||
|
||||
|
||||
@util_app.command("verify-checksum")
def verify_checksum(ctx: typer.Context) -> None:
    """Verify the SeedPass script checksum."""
    # The service reports success/failure itself.
    _get_util_service(ctx).verify_checksum()
|
||||
|
||||
|
||||
@util_app.command("update-checksum")
def update_checksum(ctx: typer.Context) -> None:
    """Regenerate the script checksum file."""
    # The service writes the checksum file itself.
    _get_util_service(ctx).update_checksum()
|
||||
|
||||
|
||||
@api_app.command("start")
def api_start(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
    """Start the SeedPass API server."""
    # start_server returns the bearer token clients must present.
    access_token = api_module.start_server(ctx.obj.get("fingerprint"))
    typer.echo(f"API token: {access_token}")
    uvicorn.run(api_module.app, host=host, port=port)
|
||||
|
||||
|
||||
@api_app.command("stop")
def api_stop(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
    """Stop the SeedPass API server."""
    # Imported lazily so the CLI does not require requests unless this
    # command is actually used.
    import requests

    try:
        # Best-effort shutdown call authenticated with the in-process token.
        # NOTE(review): this reads the private attribute ``api_module._token``,
        # which is only populated when the server was started from this same
        # process — confirm that is the intended scope of "stop".
        requests.post(
            f"http://{host}:{port}/api/v1/shutdown",
            headers={"Authorization": f"Bearer {api_module._token}"},
            timeout=2,
        )
    except Exception as exc:  # pragma: no cover - best effort
        typer.echo(f"Failed to stop server: {exc}")
|
||||
|
||||
|
||||
@app.command()
def gui() -> None:
    """Launch the BeeWare GUI.

    If the platform specific backend is missing, attempt to install it and
    retry launching the GUI.
    """
    if not _gui_backend_available():
        # Map the current platform to its Toga backend package.
        if sys.platform.startswith("linux"):
            pkg = "toga-gtk"
        elif sys.platform == "win32":
            pkg = "toga-winforms"
        elif sys.platform == "darwin":
            pkg = "toga-cocoa"
        else:
            typer.echo(
                f"Unsupported platform '{sys.platform}' for BeeWare GUI.",
                err=True,
            )
            raise typer.Exit(1)

        # NOTE(review): installs the latest release with no version or hash
        # pinning — confirm this is acceptable for a security-sensitive tool.
        typer.echo(f"Attempting to install {pkg} for GUI support...")
        try:
            subprocess.check_call([sys.executable, "-m", "pip", "install", pkg])
            typer.echo(f"Successfully installed {pkg}.")
        except subprocess.CalledProcessError as exc:
            typer.echo(f"Failed to install {pkg}: {exc}", err=True)
            raise typer.Exit(1)

        # Re-check after installing; abort if the backend still cannot load.
        if not _gui_backend_available():
            typer.echo(
                "BeeWare GUI backend still unavailable after installation attempt.",
                err=True,
            )
            raise typer.Exit(1)

    # Imported lazily so the CLI works when the GUI package is not installed.
    from seedpass_gui.app import main

    main()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this module directly as a script.
    app()
|
164
src/seedpass/cli/__init__.py
Normal file
164
src/seedpass/cli/__init__.py
Normal file
@@ -0,0 +1,164 @@
|
||||
from __future__ import annotations

import importlib
import importlib.util
import subprocess
import sys
from typing import Optional

import typer

from .common import _get_services

# Root Typer application; invoke_without_command lets the callback launch
# the interactive TUI when no subcommand is given.
app = typer.Typer(
    help="SeedPass command line interface",
    invoke_without_command=True,
)

# Global option shared across all commands
fingerprint_option = typer.Option(
    None,
    "--fingerprint",
    "-f",
    help="Specify which seed profile to use",
)

no_clipboard_option = typer.Option(
    False,
    "--no-clipboard",
    help="Disable clipboard support and print secrets instead",
    is_flag=True,
)

# Sub command groups
# NOTE(review): imported after ``app`` and the shared options are defined —
# presumably to avoid a circular import with the submodules; confirm.
from . import entry, vault, nostr, config, fingerprint, util, api

app.add_typer(entry.app, name="entry")
app.add_typer(vault.app, name="vault")
app.add_typer(nostr.app, name="nostr")
app.add_typer(config.app, name="config")
app.add_typer(fingerprint.app, name="fingerprint")
app.add_typer(util.app, name="util")
app.add_typer(api.app, name="api")
|
||||
|
||||
|
||||
def _gui_backend_available() -> bool:
|
||||
"""Return True if a platform-specific BeeWare backend is installed."""
|
||||
for pkg in ("toga_gtk", "toga_winforms", "toga_cocoa"):
|
||||
if importlib.util.find_spec(pkg) is not None:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
@app.callback(invoke_without_command=True)
def main(
    ctx: typer.Context,
    fingerprint: Optional[str] = fingerprint_option,
    no_clipboard: bool = no_clipboard_option,
) -> None:
    """SeedPass CLI entry point.

    When called without a subcommand this launches the interactive TUI.
    """
    # Stash global options where subcommands can read them.
    ctx.obj = {"fingerprint": fingerprint, "no_clipboard": no_clipboard}
    if ctx.invoked_subcommand is not None:
        return
    # No subcommand given: hand off to the interactive TUI and exit with
    # whatever status it returns.
    tui = importlib.import_module("main")
    raise typer.Exit(tui.main(fingerprint=fingerprint))
|
||||
|
||||
|
||||
@app.command("lock")
def root_lock(ctx: typer.Context) -> None:
    """Lock the vault for the active profile."""
    # Top-level alias for "vault lock".
    services = _get_services(ctx)
    services[0].lock()
    typer.echo("locked")
|
||||
|
||||
|
||||
@app.command()
def gui(
    install: bool = typer.Option(
        False,
        "--install",
        help="Attempt to install the BeeWare GUI backend if missing",
    )
) -> None:
    """Launch the BeeWare GUI.

    If a platform specific backend is missing, inform the user how to
    install it. Using ``--install`` will attempt installation after
    confirmation.
    """
    if not _gui_backend_available():
        # Pinned backend version and expected SHA256 per platform, used for
        # hash-verified installation below.
        if sys.platform.startswith("linux"):
            pkg = "toga-gtk"
            version = "0.5.2"
            sha256 = "15b346ac1a2584de5effe5e73a3888f055c68c93300aeb111db9d64186b31646"
        elif sys.platform == "win32":
            pkg = "toga-winforms"
            version = "0.5.2"
            sha256 = "83181309f204bcc4a34709d23fdfd68467ae8ecc39c906d13c661cb9a0ef581b"
        elif sys.platform == "darwin":
            pkg = "toga-cocoa"
            version = "0.5.2"
            sha256 = "a4d5d1546bf92372a6fb1b450164735fb107b2ee69d15bf87421fec3c78465f9"
        else:
            typer.echo(
                f"Unsupported platform '{sys.platform}' for BeeWare GUI.",
                err=True,
            )
            raise typer.Exit(1)

        # Without --install we only tell the user what is missing.
        if not install:
            typer.echo(
                f"BeeWare GUI backend not found. Please install {pkg} manually or rerun "
                "with '--install'.",
                err=True,
            )
            raise typer.Exit(1)

        # Installation requires explicit confirmation.
        if not typer.confirm(
            f"Install {pkg}=={version} with hash verification?", default=False
        ):
            typer.echo("Installation cancelled.", err=True)
            raise typer.Exit(1)

        typer.echo(
            "SeedPass uses pinned versions and SHA256 hashes to verify the GUI backend "
            "and protect against tampered packages."
        )

        # NOTE(review): pip normally honors ``--hash`` entries only inside
        # requirements files; passing ``--hash`` directly on the install
        # command line may be rejected by pip — verify this invocation works.
        try:
            subprocess.check_call(
                [
                    sys.executable,
                    "-m",
                    "pip",
                    "install",
                    "--require-hashes",
                    f"{pkg}=={version}",
                    f"--hash=sha256:{sha256}",
                ]
            )
            typer.echo(f"Successfully installed {pkg}=={version}.")
        except subprocess.CalledProcessError as exc:
            typer.echo(
                "Secure installation failed. Please install the package manually "
                f"from a trusted source. Details: {exc}",
                err=True,
            )
            raise typer.Exit(1)

        # Re-check after installing; abort if the backend still cannot load.
        if not _gui_backend_available():
            typer.echo(
                "BeeWare GUI backend still unavailable after installation attempt.",
                err=True,
            )
            raise typer.Exit(1)

    # Imported lazily so the CLI works when the GUI package is not installed.
    from seedpass_gui.app import main

    main()
|
||||
|
||||
|
||||
if __name__ == "__main__":  # pragma: no cover
    # Allow running this module directly as a script.
    app()
|
32
src/seedpass/cli/api.py
Normal file
32
src/seedpass/cli/api.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from __future__ import annotations

import typer
import uvicorn

from .. import api as api_module


# Sub-application mounted as "seedpass api ..." by the package __init__.
app = typer.Typer(help="Run the API server")
|
||||
|
||||
|
||||
@app.command("start")
def api_start(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
    """Start the SeedPass API server."""
    # start_server returns the bearer token clients must present.
    access_token = api_module.start_server(ctx.obj.get("fingerprint"))
    typer.echo(f"API token: {access_token}")
    uvicorn.run(api_module.app, host=host, port=port)
|
||||
|
||||
|
||||
@app.command("stop")
def api_stop(ctx: typer.Context, host: str = "127.0.0.1", port: int = 8000) -> None:
    """Stop the SeedPass API server."""
    # Imported lazily so the CLI does not require requests unless this
    # command is actually used.
    import requests

    try:
        # NOTE(review): this sends ``app.state.token_hash`` as the bearer
        # token. If the server hashes the presented token before comparing,
        # presenting the stored hash itself would never authenticate —
        # confirm against the shutdown endpoint's auth check.
        requests.post(
            f"http://{host}:{port}/api/v1/shutdown",
            headers={"Authorization": f"Bearer {api_module.app.state.token_hash}"},
            timeout=2,
        )
    except Exception as exc:  # pragma: no cover - best effort
        typer.echo(f"Failed to stop server: {exc}")
|
61
src/seedpass/cli/common.py
Normal file
61
src/seedpass/cli/common.py
Normal file
@@ -0,0 +1,61 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typer
|
||||
|
||||
from seedpass.core.manager import PasswordManager
|
||||
from seedpass.core.entry_types import EntryType
|
||||
from seedpass.core.api import (
|
||||
VaultService,
|
||||
ProfileService,
|
||||
SyncService,
|
||||
EntryService,
|
||||
ConfigService,
|
||||
UtilityService,
|
||||
NostrService,
|
||||
ChangePasswordRequest,
|
||||
UnlockRequest,
|
||||
BackupParentSeedRequest,
|
||||
ProfileSwitchRequest,
|
||||
ProfileRemoveRequest,
|
||||
)
|
||||
|
||||
|
||||
def _get_pm(ctx: typer.Context) -> PasswordManager:
    """Return a PasswordManager optionally selecting a fingerprint."""
    fp = ctx.obj.get("fingerprint")
    pm = PasswordManager() if fp is None else PasswordManager(fingerprint=fp)
    if ctx.obj.get("no_clipboard"):
        # --no-clipboard: print secrets instead of copying to the clipboard.
        pm.secret_mode_enabled = False
    return pm
|
||||
|
||||
|
||||
def _get_services(
    ctx: typer.Context,
) -> tuple[VaultService, ProfileService, SyncService]:
    """Return service layer instances for the current context."""
    manager = _get_pm(ctx)
    # All three services share the same PasswordManager instance.
    return (VaultService(manager), ProfileService(manager), SyncService(manager))
|
||||
|
||||
|
||||
def _get_entry_service(ctx: typer.Context) -> EntryService:
    """Return an EntryService bound to the context's PasswordManager."""
    return EntryService(_get_pm(ctx))
|
||||
|
||||
|
||||
def _get_config_service(ctx: typer.Context) -> ConfigService:
    """Return a ConfigService bound to the context's PasswordManager."""
    return ConfigService(_get_pm(ctx))
|
||||
|
||||
|
||||
def _get_util_service(ctx: typer.Context) -> UtilityService:
    """Return a UtilityService bound to the context's PasswordManager."""
    return UtilityService(_get_pm(ctx))
|
||||
|
||||
|
||||
def _get_nostr_service(ctx: typer.Context) -> NostrService:
    """Return a NostrService bound to the context's PasswordManager."""
    return NostrService(_get_pm(ctx))
|
125
src/seedpass/cli/config.py
Normal file
125
src/seedpass/cli/config.py
Normal file
@@ -0,0 +1,125 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typer
|
||||
|
||||
from .common import _get_config_service
|
||||
|
||||
|
||||
app = typer.Typer(help="Get or set configuration values")
|
||||
|
||||
|
||||
@app.command("get")
|
||||
def config_get(ctx: typer.Context, key: str) -> None:
|
||||
"""Get a configuration value."""
|
||||
service = _get_config_service(ctx)
|
||||
value = service.get(key)
|
||||
if value is None:
|
||||
typer.echo("Key not found")
|
||||
else:
|
||||
typer.echo(str(value))
|
||||
|
||||
|
||||
@app.command("set")
|
||||
def config_set(ctx: typer.Context, key: str, value: str) -> None:
|
||||
"""Set a configuration value."""
|
||||
service = _get_config_service(ctx)
|
||||
|
||||
try:
|
||||
val = (
|
||||
[r.strip() for r in value.split(",") if r.strip()]
|
||||
if key == "relays"
|
||||
else value
|
||||
)
|
||||
service.set(key, val)
|
||||
except KeyError:
|
||||
typer.echo("Unknown key")
|
||||
raise typer.Exit(code=1)
|
||||
except Exception as exc: # pragma: no cover - pass through errors
|
||||
typer.echo(f"Error: {exc}")
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
typer.echo("Updated")
|
||||
|
||||
|
||||
@app.command("toggle-secret-mode")
|
||||
def config_toggle_secret_mode(ctx: typer.Context) -> None:
|
||||
"""Interactively enable or disable secret mode.
|
||||
|
||||
When enabled, newly generated and retrieved passwords are copied to the
|
||||
clipboard instead of printed to the screen.
|
||||
"""
|
||||
service = _get_config_service(ctx)
|
||||
try:
|
||||
enabled = service.get_secret_mode_enabled()
|
||||
delay = service.get_clipboard_clear_delay()
|
||||
except Exception as exc: # pragma: no cover - pass through errors
|
||||
typer.echo(f"Error loading settings: {exc}")
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
typer.echo(f"Secret mode is currently {'ON' if enabled else 'OFF'}")
|
||||
choice = (
|
||||
typer.prompt(
|
||||
"Enable secret mode? (y/n, blank to keep)", default="", show_default=False
|
||||
)
|
||||
.strip()
|
||||
.lower()
|
||||
)
|
||||
if choice in ("y", "yes"):
|
||||
enabled = True
|
||||
elif choice in ("n", "no"):
|
||||
enabled = False
|
||||
|
||||
inp = typer.prompt(
|
||||
f"Clipboard clear delay in seconds [{delay}]", default="", show_default=False
|
||||
).strip()
|
||||
if inp:
|
||||
try:
|
||||
delay = int(inp)
|
||||
if delay <= 0:
|
||||
typer.echo("Delay must be positive")
|
||||
raise typer.Exit(code=1)
|
||||
except ValueError:
|
||||
typer.echo("Invalid number")
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
try:
|
||||
service.set_secret_mode(enabled, delay)
|
||||
except Exception as exc: # pragma: no cover - pass through errors
|
||||
typer.echo(f"Error: {exc}")
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
status = "enabled" if enabled else "disabled"
|
||||
typer.echo(f"Secret mode {status}.")
|
||||
|
||||
|
||||
@app.command("toggle-offline")
|
||||
def config_toggle_offline(ctx: typer.Context) -> None:
|
||||
"""Enable or disable offline mode."""
|
||||
service = _get_config_service(ctx)
|
||||
try:
|
||||
enabled = service.get_offline_mode()
|
||||
except Exception as exc: # pragma: no cover - pass through errors
|
||||
typer.echo(f"Error loading settings: {exc}")
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
typer.echo(f"Offline mode is currently {'ON' if enabled else 'OFF'}")
|
||||
choice = (
|
||||
typer.prompt(
|
||||
"Enable offline mode? (y/n, blank to keep)", default="", show_default=False
|
||||
)
|
||||
.strip()
|
||||
.lower()
|
||||
)
|
||||
if choice in ("y", "yes"):
|
||||
enabled = True
|
||||
elif choice in ("n", "no"):
|
||||
enabled = False
|
||||
|
||||
try:
|
||||
service.set_offline_mode(enabled)
|
||||
except Exception as exc: # pragma: no cover - pass through errors
|
||||
typer.echo(f"Error: {exc}")
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
status = "enabled" if enabled else "disabled"
|
||||
typer.echo(f"Offline mode {status}.")
|
364
src/seedpass/cli/entry.py
Normal file
364
src/seedpass/cli/entry.py
Normal file
@@ -0,0 +1,364 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
import typer
|
||||
import click
|
||||
|
||||
from .common import _get_entry_service, EntryType
|
||||
from seedpass.core.entry_types import ALL_ENTRY_TYPES
|
||||
from utils.clipboard import ClipboardUnavailableError
|
||||
|
||||
|
||||
app = typer.Typer(help="Manage individual entries")
|
||||
|
||||
|
||||
@app.command("list")
|
||||
def entry_list(
|
||||
ctx: typer.Context,
|
||||
sort: str = typer.Option(
|
||||
"index", "--sort", help="Sort by 'index', 'label', or 'updated'"
|
||||
),
|
||||
kind: Optional[str] = typer.Option(
|
||||
None,
|
||||
"--kind",
|
||||
help="Filter by entry type",
|
||||
click_type=click.Choice(ALL_ENTRY_TYPES),
|
||||
),
|
||||
archived: bool = typer.Option(False, "--archived", help="Include archived"),
|
||||
) -> None:
|
||||
"""List entries in the vault."""
|
||||
service = _get_entry_service(ctx)
|
||||
entries = service.list_entries(
|
||||
sort_by=sort,
|
||||
filter_kinds=[kind] if kind else None,
|
||||
include_archived=archived,
|
||||
)
|
||||
for idx, label, username, url, is_archived in entries:
|
||||
line = f"{idx}: {label}"
|
||||
if username:
|
||||
line += f" ({username})"
|
||||
if url:
|
||||
line += f" {url}"
|
||||
if is_archived:
|
||||
line += " [archived]"
|
||||
typer.echo(line)
|
||||
|
||||
|
||||
@app.command("search")
|
||||
def entry_search(
|
||||
ctx: typer.Context,
|
||||
query: str,
|
||||
kinds: List[str] = typer.Option(
|
||||
None,
|
||||
"--kind",
|
||||
"-k",
|
||||
help="Filter by entry kinds (can be repeated)",
|
||||
click_type=click.Choice(ALL_ENTRY_TYPES),
|
||||
),
|
||||
) -> None:
|
||||
"""Search entries."""
|
||||
service = _get_entry_service(ctx)
|
||||
kinds = list(kinds) if kinds else None
|
||||
results = service.search_entries(query, kinds=kinds)
|
||||
if not results:
|
||||
typer.echo("No matching entries found")
|
||||
return
|
||||
for idx, label, username, url, _arch, etype in results:
|
||||
line = f"{idx}: {etype.value.replace('_', ' ').title()} - {label}"
|
||||
if username:
|
||||
line += f" ({username})"
|
||||
if url:
|
||||
line += f" {url}"
|
||||
typer.echo(line)
|
||||
|
||||
|
||||
@app.command("get")
|
||||
def entry_get(ctx: typer.Context, query: str) -> None:
|
||||
"""Retrieve a single entry's secret."""
|
||||
service = _get_entry_service(ctx)
|
||||
try:
|
||||
matches = service.search_entries(query)
|
||||
if len(matches) == 0:
|
||||
typer.echo("No matching entries found")
|
||||
raise typer.Exit(code=1)
|
||||
if len(matches) > 1:
|
||||
typer.echo("Matches:")
|
||||
for idx, label, username, _url, _arch, etype in matches:
|
||||
name = f"{idx}: {etype.value.replace('_', ' ').title()} - {label}"
|
||||
if username:
|
||||
name += f" ({username})"
|
||||
typer.echo(name)
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
index = matches[0][0]
|
||||
entry = service.retrieve_entry(index)
|
||||
etype = entry.get("type", entry.get("kind"))
|
||||
if etype == EntryType.PASSWORD.value:
|
||||
length = int(entry.get("length", 12))
|
||||
password = service.generate_password(length, index)
|
||||
typer.echo(password)
|
||||
elif etype == EntryType.TOTP.value:
|
||||
code = service.get_totp_code(index)
|
||||
typer.echo(code)
|
||||
else:
|
||||
typer.echo("Unsupported entry type")
|
||||
raise typer.Exit(code=1)
|
||||
except ClipboardUnavailableError as exc:
|
||||
typer.echo(
|
||||
f"Clipboard unavailable: {exc}\n"
|
||||
"Re-run with '--no-clipboard' to print secrets instead.",
|
||||
err=True,
|
||||
)
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
|
||||
@app.command("add")
|
||||
def entry_add(
|
||||
ctx: typer.Context,
|
||||
label: str,
|
||||
length: int = typer.Option(12, "--length"),
|
||||
username: Optional[str] = typer.Option(None, "--username"),
|
||||
url: Optional[str] = typer.Option(None, "--url"),
|
||||
no_special: bool = typer.Option(
|
||||
False, "--no-special", help="Exclude special characters", is_flag=True
|
||||
),
|
||||
allowed_special_chars: Optional[str] = typer.Option(
|
||||
None, "--allowed-special-chars", help="Explicit set of special characters"
|
||||
),
|
||||
special_mode: Optional[str] = typer.Option(
|
||||
None,
|
||||
"--special-mode",
|
||||
help="Special character mode",
|
||||
),
|
||||
exclude_ambiguous: bool = typer.Option(
|
||||
False,
|
||||
"--exclude-ambiguous",
|
||||
help="Exclude ambiguous characters",
|
||||
is_flag=True,
|
||||
),
|
||||
min_uppercase: Optional[int] = typer.Option(None, "--min-uppercase"),
|
||||
min_lowercase: Optional[int] = typer.Option(None, "--min-lowercase"),
|
||||
min_digits: Optional[int] = typer.Option(None, "--min-digits"),
|
||||
min_special: Optional[int] = typer.Option(None, "--min-special"),
|
||||
) -> None:
|
||||
"""Add a new password entry and output its index."""
|
||||
service = _get_entry_service(ctx)
|
||||
kwargs = {}
|
||||
if no_special:
|
||||
kwargs["include_special_chars"] = False
|
||||
if allowed_special_chars is not None:
|
||||
kwargs["allowed_special_chars"] = allowed_special_chars
|
||||
if special_mode is not None:
|
||||
kwargs["special_mode"] = special_mode
|
||||
if exclude_ambiguous:
|
||||
kwargs["exclude_ambiguous"] = True
|
||||
if min_uppercase is not None:
|
||||
kwargs["min_uppercase"] = min_uppercase
|
||||
if min_lowercase is not None:
|
||||
kwargs["min_lowercase"] = min_lowercase
|
||||
if min_digits is not None:
|
||||
kwargs["min_digits"] = min_digits
|
||||
if min_special is not None:
|
||||
kwargs["min_special"] = min_special
|
||||
|
||||
index = service.add_entry(label, length, username, url, **kwargs)
|
||||
typer.echo(str(index))
|
||||
|
||||
|
||||
@app.command("add-totp")
|
||||
def entry_add_totp(
|
||||
ctx: typer.Context,
|
||||
label: str,
|
||||
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
|
||||
secret: Optional[str] = typer.Option(None, "--secret", help="Import secret"),
|
||||
period: int = typer.Option(30, "--period", help="TOTP period in seconds"),
|
||||
digits: int = typer.Option(6, "--digits", help="Number of TOTP digits"),
|
||||
) -> None:
|
||||
"""Add a TOTP entry and output the otpauth URI."""
|
||||
service = _get_entry_service(ctx)
|
||||
uri = service.add_totp(
|
||||
label,
|
||||
index=index,
|
||||
secret=secret,
|
||||
period=period,
|
||||
digits=digits,
|
||||
)
|
||||
typer.echo(uri)
|
||||
|
||||
|
||||
@app.command("add-ssh")
|
||||
def entry_add_ssh(
|
||||
ctx: typer.Context,
|
||||
label: str,
|
||||
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
|
||||
notes: str = typer.Option("", "--notes", help="Entry notes"),
|
||||
) -> None:
|
||||
"""Add an SSH key entry and output its index."""
|
||||
service = _get_entry_service(ctx)
|
||||
idx = service.add_ssh_key(
|
||||
label,
|
||||
index=index,
|
||||
notes=notes,
|
||||
)
|
||||
typer.echo(str(idx))
|
||||
|
||||
|
||||
@app.command("add-pgp")
|
||||
def entry_add_pgp(
|
||||
ctx: typer.Context,
|
||||
label: str,
|
||||
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
|
||||
key_type: str = typer.Option("ed25519", "--key-type", help="Key type"),
|
||||
user_id: str = typer.Option("", "--user-id", help="User ID"),
|
||||
notes: str = typer.Option("", "--notes", help="Entry notes"),
|
||||
) -> None:
|
||||
"""Add a PGP key entry and output its index."""
|
||||
service = _get_entry_service(ctx)
|
||||
idx = service.add_pgp_key(
|
||||
label,
|
||||
index=index,
|
||||
key_type=key_type,
|
||||
user_id=user_id,
|
||||
notes=notes,
|
||||
)
|
||||
typer.echo(str(idx))
|
||||
|
||||
|
||||
@app.command("add-nostr")
|
||||
def entry_add_nostr(
|
||||
ctx: typer.Context,
|
||||
label: str,
|
||||
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
|
||||
notes: str = typer.Option("", "--notes", help="Entry notes"),
|
||||
) -> None:
|
||||
"""Add a Nostr key entry and output its index."""
|
||||
service = _get_entry_service(ctx)
|
||||
idx = service.add_nostr_key(
|
||||
label,
|
||||
index=index,
|
||||
notes=notes,
|
||||
)
|
||||
typer.echo(str(idx))
|
||||
|
||||
|
||||
@app.command("add-seed")
|
||||
def entry_add_seed(
|
||||
ctx: typer.Context,
|
||||
label: str,
|
||||
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
|
||||
words: int = typer.Option(24, "--words", help="Word count"),
|
||||
notes: str = typer.Option("", "--notes", help="Entry notes"),
|
||||
) -> None:
|
||||
"""Add a derived seed phrase entry and output its index."""
|
||||
service = _get_entry_service(ctx)
|
||||
idx = service.add_seed(
|
||||
label,
|
||||
index=index,
|
||||
words=words,
|
||||
notes=notes,
|
||||
)
|
||||
typer.echo(str(idx))
|
||||
|
||||
|
||||
@app.command("add-key-value")
|
||||
def entry_add_key_value(
|
||||
ctx: typer.Context,
|
||||
label: str,
|
||||
key: str = typer.Option(..., "--key", help="Key name"),
|
||||
value: str = typer.Option(..., "--value", help="Stored value"),
|
||||
notes: str = typer.Option("", "--notes", help="Entry notes"),
|
||||
) -> None:
|
||||
"""Add a key/value entry and output its index."""
|
||||
service = _get_entry_service(ctx)
|
||||
idx = service.add_key_value(label, key, value, notes=notes)
|
||||
typer.echo(str(idx))
|
||||
|
||||
|
||||
@app.command("add-managed-account")
|
||||
def entry_add_managed_account(
|
||||
ctx: typer.Context,
|
||||
label: str,
|
||||
index: Optional[int] = typer.Option(None, "--index", help="Derivation index"),
|
||||
notes: str = typer.Option("", "--notes", help="Entry notes"),
|
||||
) -> None:
|
||||
"""Add a managed account seed entry and output its index."""
|
||||
service = _get_entry_service(ctx)
|
||||
idx = service.add_managed_account(
|
||||
label,
|
||||
index=index,
|
||||
notes=notes,
|
||||
)
|
||||
typer.echo(str(idx))
|
||||
|
||||
|
||||
@app.command("modify")
|
||||
def entry_modify(
|
||||
ctx: typer.Context,
|
||||
entry_id: int,
|
||||
label: Optional[str] = typer.Option(None, "--label"),
|
||||
username: Optional[str] = typer.Option(None, "--username"),
|
||||
url: Optional[str] = typer.Option(None, "--url"),
|
||||
notes: Optional[str] = typer.Option(None, "--notes"),
|
||||
period: Optional[int] = typer.Option(
|
||||
None, "--period", help="TOTP period in seconds"
|
||||
),
|
||||
digits: Optional[int] = typer.Option(None, "--digits", help="TOTP digits"),
|
||||
key: Optional[str] = typer.Option(None, "--key", help="New key"),
|
||||
value: Optional[str] = typer.Option(None, "--value", help="New value"),
|
||||
) -> None:
|
||||
"""Modify an existing entry."""
|
||||
service = _get_entry_service(ctx)
|
||||
try:
|
||||
service.modify_entry(
|
||||
entry_id,
|
||||
username=username,
|
||||
url=url,
|
||||
notes=notes,
|
||||
label=label,
|
||||
period=period,
|
||||
digits=digits,
|
||||
key=key,
|
||||
value=value,
|
||||
)
|
||||
except ValueError as e:
|
||||
typer.echo(str(e))
|
||||
sys.stdout.flush()
|
||||
raise typer.Exit(code=1)
|
||||
|
||||
|
||||
@app.command("archive")
|
||||
def entry_archive(ctx: typer.Context, entry_id: int) -> None:
|
||||
"""Archive an entry."""
|
||||
service = _get_entry_service(ctx)
|
||||
service.archive_entry(entry_id)
|
||||
typer.echo(str(entry_id))
|
||||
|
||||
|
||||
@app.command("unarchive")
|
||||
def entry_unarchive(ctx: typer.Context, entry_id: int) -> None:
|
||||
"""Restore an archived entry."""
|
||||
service = _get_entry_service(ctx)
|
||||
service.restore_entry(entry_id)
|
||||
typer.echo(str(entry_id))
|
||||
|
||||
|
||||
@app.command("totp-codes")
|
||||
def entry_totp_codes(ctx: typer.Context) -> None:
|
||||
"""Display all current TOTP codes."""
|
||||
service = _get_entry_service(ctx)
|
||||
service.display_totp_codes()
|
||||
|
||||
|
||||
@app.command("export-totp")
|
||||
def entry_export_totp(
|
||||
ctx: typer.Context, file: str = typer.Option(..., help="Output file")
|
||||
) -> None:
|
||||
"""Export all TOTP secrets to a JSON file."""
|
||||
service = _get_entry_service(ctx)
|
||||
data = service.export_totp_entries()
|
||||
Path(file).write_text(json.dumps(data, indent=2))
|
||||
typer.echo(str(file))
|
40
src/seedpass/cli/fingerprint.py
Normal file
40
src/seedpass/cli/fingerprint.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typer
|
||||
|
||||
from .common import _get_services, ProfileRemoveRequest, ProfileSwitchRequest
|
||||
|
||||
|
||||
app = typer.Typer(help="Manage seed profiles")
|
||||
|
||||
|
||||
@app.command("list")
|
||||
def fingerprint_list(ctx: typer.Context) -> None:
|
||||
"""List available seed profiles."""
|
||||
_vault, profile_service, _sync = _get_services(ctx)
|
||||
for fp in profile_service.list_profiles():
|
||||
typer.echo(fp)
|
||||
|
||||
|
||||
@app.command("add")
|
||||
def fingerprint_add(ctx: typer.Context) -> None:
|
||||
"""Create a new seed profile."""
|
||||
_vault, profile_service, _sync = _get_services(ctx)
|
||||
profile_service.add_profile()
|
||||
|
||||
|
||||
@app.command("remove")
|
||||
def fingerprint_remove(ctx: typer.Context, fingerprint: str) -> None:
|
||||
"""Remove a seed profile."""
|
||||
_vault, profile_service, _sync = _get_services(ctx)
|
||||
profile_service.remove_profile(ProfileRemoveRequest(fingerprint=fingerprint))
|
||||
|
||||
|
||||
@app.command("switch")
|
||||
def fingerprint_switch(ctx: typer.Context, fingerprint: str) -> None:
|
||||
"""Switch to another seed profile."""
|
||||
_vault, profile_service, _sync = _get_services(ctx)
|
||||
password = typer.prompt("Master password", hide_input=True)
|
||||
profile_service.switch_profile(
|
||||
ProfileSwitchRequest(fingerprint=fingerprint, password=password)
|
||||
)
|
67
src/seedpass/cli/nostr.py
Normal file
67
src/seedpass/cli/nostr.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typer
|
||||
|
||||
from .common import _get_services, _get_nostr_service
|
||||
|
||||
|
||||
app = typer.Typer(
|
||||
help="Interact with Nostr relays. See docs/nostr_setup.md for configuration and troubleshooting."
|
||||
)
|
||||
|
||||
|
||||
@app.command("sync")
|
||||
def nostr_sync(ctx: typer.Context) -> None:
|
||||
"""Sync with configured Nostr relays."""
|
||||
_vault, _profile, sync_service = _get_services(ctx)
|
||||
model = sync_service.sync()
|
||||
if model:
|
||||
typer.echo("Event IDs:")
|
||||
typer.echo(f"- manifest: {model.manifest_id}")
|
||||
for cid in model.chunk_ids:
|
||||
typer.echo(f"- chunk: {cid}")
|
||||
for did in model.delta_ids:
|
||||
typer.echo(f"- delta: {did}")
|
||||
else:
|
||||
typer.echo("Error: Failed to sync vault")
|
||||
|
||||
|
||||
@app.command("get-pubkey")
|
||||
def nostr_get_pubkey(ctx: typer.Context) -> None:
|
||||
"""Display the active profile's npub."""
|
||||
service = _get_nostr_service(ctx)
|
||||
npub = service.get_pubkey()
|
||||
typer.echo(npub)
|
||||
|
||||
|
||||
@app.command("list-relays")
|
||||
def nostr_list_relays(ctx: typer.Context) -> None:
|
||||
"""Display configured Nostr relays."""
|
||||
service = _get_nostr_service(ctx)
|
||||
relays = service.list_relays()
|
||||
for i, r in enumerate(relays, 1):
|
||||
typer.echo(f"{i}: {r}")
|
||||
|
||||
|
||||
@app.command("add-relay")
|
||||
def nostr_add_relay(ctx: typer.Context, url: str) -> None:
|
||||
"""Add a relay URL."""
|
||||
service = _get_nostr_service(ctx)
|
||||
try:
|
||||
service.add_relay(url)
|
||||
except Exception as exc: # pragma: no cover - pass through errors
|
||||
typer.echo(f"Error: {exc}")
|
||||
raise typer.Exit(code=1)
|
||||
typer.echo("Added")
|
||||
|
||||
|
||||
@app.command("remove-relay")
|
||||
def nostr_remove_relay(ctx: typer.Context, idx: int) -> None:
|
||||
"""Remove a relay by index (1-based)."""
|
||||
service = _get_nostr_service(ctx)
|
||||
try:
|
||||
service.remove_relay(idx)
|
||||
except Exception as exc: # pragma: no cover - pass through errors
|
||||
typer.echo(f"Error: {exc}")
|
||||
raise typer.Exit(code=1)
|
||||
typer.echo("Removed")
|
74
src/seedpass/cli/util.py
Normal file
74
src/seedpass/cli/util.py
Normal file
@@ -0,0 +1,74 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
import typer
|
||||
|
||||
from .common import _get_util_service
|
||||
|
||||
|
||||
app = typer.Typer(help="Utility commands")
|
||||
|
||||
|
||||
@app.command("generate-password")
|
||||
def generate_password(
|
||||
ctx: typer.Context,
|
||||
length: int = 24,
|
||||
no_special: bool = typer.Option(
|
||||
False, "--no-special", help="Exclude special characters", is_flag=True
|
||||
),
|
||||
allowed_special_chars: Optional[str] = typer.Option(
|
||||
None, "--allowed-special-chars", help="Explicit set of special characters"
|
||||
),
|
||||
special_mode: Optional[str] = typer.Option(
|
||||
None,
|
||||
"--special-mode",
|
||||
help="Special character mode",
|
||||
),
|
||||
exclude_ambiguous: bool = typer.Option(
|
||||
False,
|
||||
"--exclude-ambiguous",
|
||||
help="Exclude ambiguous characters",
|
||||
is_flag=True,
|
||||
),
|
||||
min_uppercase: Optional[int] = typer.Option(None, "--min-uppercase"),
|
||||
min_lowercase: Optional[int] = typer.Option(None, "--min-lowercase"),
|
||||
min_digits: Optional[int] = typer.Option(None, "--min-digits"),
|
||||
min_special: Optional[int] = typer.Option(None, "--min-special"),
|
||||
) -> None:
|
||||
"""Generate a strong password."""
|
||||
service = _get_util_service(ctx)
|
||||
kwargs = {}
|
||||
if no_special:
|
||||
kwargs["include_special_chars"] = False
|
||||
if allowed_special_chars is not None:
|
||||
kwargs["allowed_special_chars"] = allowed_special_chars
|
||||
if special_mode is not None:
|
||||
kwargs["special_mode"] = special_mode
|
||||
if exclude_ambiguous:
|
||||
kwargs["exclude_ambiguous"] = True
|
||||
if min_uppercase is not None:
|
||||
kwargs["min_uppercase"] = min_uppercase
|
||||
if min_lowercase is not None:
|
||||
kwargs["min_lowercase"] = min_lowercase
|
||||
if min_digits is not None:
|
||||
kwargs["min_digits"] = min_digits
|
||||
if min_special is not None:
|
||||
kwargs["min_special"] = min_special
|
||||
|
||||
password = service.generate_password(length, **kwargs)
|
||||
typer.echo(password)
|
||||
|
||||
|
||||
@app.command("verify-checksum")
|
||||
def verify_checksum(ctx: typer.Context) -> None:
|
||||
"""Verify the SeedPass script checksum."""
|
||||
service = _get_util_service(ctx)
|
||||
service.verify_checksum()
|
||||
|
||||
|
||||
@app.command("update-checksum")
|
||||
def update_checksum(ctx: typer.Context) -> None:
|
||||
"""Regenerate the script checksum file."""
|
||||
service = _get_util_service(ctx)
|
||||
service.update_checksum()
|
99
src/seedpass/cli/vault.py
Normal file
99
src/seedpass/cli/vault.py
Normal file
@@ -0,0 +1,99 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import typer
|
||||
|
||||
from .common import (
|
||||
_get_services,
|
||||
ChangePasswordRequest,
|
||||
UnlockRequest,
|
||||
BackupParentSeedRequest,
|
||||
)
|
||||
|
||||
|
||||
app = typer.Typer(help="Manage the entire vault")
|
||||
|
||||
|
||||
@app.command("export")
|
||||
def vault_export(
|
||||
ctx: typer.Context, file: str = typer.Option(..., help="Output file")
|
||||
) -> None:
|
||||
"""Export the vault profile to an encrypted file."""
|
||||
vault_service, _profile, _sync = _get_services(ctx)
|
||||
data = vault_service.export_profile()
|
||||
Path(file).write_bytes(data)
|
||||
typer.echo(str(file))
|
||||
|
||||
|
||||
@app.command("import")
|
||||
def vault_import(
|
||||
ctx: typer.Context, file: str = typer.Option(..., help="Input file")
|
||||
) -> None:
|
||||
"""Import a vault profile from an encrypted file."""
|
||||
vault_service, _profile, _sync = _get_services(ctx)
|
||||
data = Path(file).read_bytes()
|
||||
vault_service.import_profile(data)
|
||||
typer.echo(str(file))
|
||||
|
||||
|
||||
@app.command("change-password")
|
||||
def vault_change_password(ctx: typer.Context) -> None:
|
||||
"""Change the master password used for encryption."""
|
||||
vault_service, _profile, _sync = _get_services(ctx)
|
||||
old_pw = typer.prompt("Current password", hide_input=True)
|
||||
new_pw = typer.prompt("New password", hide_input=True, confirmation_prompt=True)
|
||||
try:
|
||||
vault_service.change_password(
|
||||
ChangePasswordRequest(old_password=old_pw, new_password=new_pw)
|
||||
)
|
||||
except Exception as exc: # pragma: no cover - pass through errors
|
||||
typer.echo(f"Error: {exc}")
|
||||
raise typer.Exit(code=1)
|
||||
typer.echo("Password updated")
|
||||
|
||||
|
||||
@app.command("unlock")
|
||||
def vault_unlock(ctx: typer.Context) -> None:
|
||||
"""Unlock the vault for the active profile."""
|
||||
vault_service, _profile, _sync = _get_services(ctx)
|
||||
password = typer.prompt("Master password", hide_input=True)
|
||||
try:
|
||||
resp = vault_service.unlock(UnlockRequest(password=password))
|
||||
except Exception as exc: # pragma: no cover - pass through errors
|
||||
typer.echo(f"Error: {exc}")
|
||||
raise typer.Exit(code=1)
|
||||
typer.echo(f"Unlocked in {resp.duration:.2f}s")
|
||||
|
||||
|
||||
@app.command("lock")
|
||||
def vault_lock(ctx: typer.Context) -> None:
|
||||
"""Lock the vault and clear sensitive data from memory."""
|
||||
vault_service, _profile, _sync = _get_services(ctx)
|
||||
vault_service.lock()
|
||||
typer.echo("locked")
|
||||
|
||||
|
||||
@app.command("stats")
|
||||
def vault_stats(ctx: typer.Context) -> None:
|
||||
"""Display statistics about the current seed profile."""
|
||||
vault_service, _profile, _sync = _get_services(ctx)
|
||||
stats = vault_service.stats()
|
||||
typer.echo(json.dumps(stats, indent=2))
|
||||
|
||||
|
||||
@app.command("reveal-parent-seed")
|
||||
def vault_reveal_parent_seed(
|
||||
ctx: typer.Context,
|
||||
file: Optional[str] = typer.Option(
|
||||
None, "--file", help="Save encrypted seed to this path"
|
||||
),
|
||||
) -> None:
|
||||
"""Display the parent seed and optionally write an encrypted backup file."""
|
||||
vault_service, _profile, _sync = _get_services(ctx)
|
||||
password = typer.prompt("Master password", hide_input=True)
|
||||
vault_service.backup_parent_seed(
|
||||
BackupParentSeedRequest(path=Path(file) if file else None, password=password)
|
||||
)
|
@@ -148,7 +148,9 @@ class VaultService:
|
||||
"""Restore a profile from ``data`` and sync."""
|
||||
|
||||
with self._lock:
|
||||
decrypted = self._manager.vault.encryption_manager.decrypt_data(data)
|
||||
decrypted = self._manager.vault.encryption_manager.decrypt_data(
|
||||
data, context="profile"
|
||||
)
|
||||
index = json.loads(decrypted.decode("utf-8"))
|
||||
self._manager.vault.save_index(index)
|
||||
self._manager.sync_vault()
|
||||
@@ -263,13 +265,13 @@ class EntryService:
|
||||
def list_entries(
|
||||
self,
|
||||
sort_by: str = "index",
|
||||
filter_kind: str | None = None,
|
||||
filter_kinds: list[str] | None = None,
|
||||
include_archived: bool = False,
|
||||
):
|
||||
with self._lock:
|
||||
return self._manager.entry_manager.list_entries(
|
||||
sort_by=sort_by,
|
||||
filter_kind=filter_kind,
|
||||
filter_kinds=filter_kinds,
|
||||
include_archived=include_archived,
|
||||
)
|
||||
|
||||
@@ -303,9 +305,10 @@ class EntryService:
|
||||
|
||||
def get_totp_code(self, entry_id: int) -> str:
|
||||
with self._lock:
|
||||
return self._manager.entry_manager.get_totp_code(
|
||||
entry_id, self._manager.parent_seed
|
||||
key = getattr(self._manager, "KEY_TOTP_DET", None) or getattr(
|
||||
self._manager, "parent_seed", None
|
||||
)
|
||||
return self._manager.entry_manager.get_totp_code(entry_id, key)
|
||||
|
||||
def add_entry(
|
||||
self,
|
||||
@@ -513,9 +516,10 @@ class EntryService:
|
||||
|
||||
def export_totp_entries(self) -> dict:
|
||||
with self._lock:
|
||||
return self._manager.entry_manager.export_totp_entries(
|
||||
self._manager.parent_seed
|
||||
key = getattr(self._manager, "KEY_TOTP_DET", None) or getattr(
|
||||
self._manager, "parent_seed", None
|
||||
)
|
||||
return self._manager.entry_manager.export_totp_entries(key)
|
||||
|
||||
def display_totp_codes(self) -> None:
|
||||
with self._lock:
|
||||
|
@@ -15,7 +15,6 @@ import logging
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from termcolor import colored
|
||||
|
||||
|
@@ -47,11 +47,12 @@ class ConfigManager:
|
||||
"inactivity_timeout": INACTIVITY_TIMEOUT,
|
||||
"kdf_iterations": 50_000,
|
||||
"kdf_mode": "pbkdf2",
|
||||
"argon2_time_cost": 2,
|
||||
"additional_backup_path": "",
|
||||
"backup_interval": 0,
|
||||
"secret_mode_enabled": False,
|
||||
"clipboard_clear_delay": 45,
|
||||
"quick_unlock": False,
|
||||
"quick_unlock_enabled": False,
|
||||
"nostr_max_retries": MAX_RETRIES,
|
||||
"nostr_retry_delay": float(RETRY_DELAY),
|
||||
"min_uppercase": 2,
|
||||
@@ -76,11 +77,12 @@ class ConfigManager:
|
||||
data.setdefault("inactivity_timeout", INACTIVITY_TIMEOUT)
|
||||
data.setdefault("kdf_iterations", 50_000)
|
||||
data.setdefault("kdf_mode", "pbkdf2")
|
||||
data.setdefault("argon2_time_cost", 2)
|
||||
data.setdefault("additional_backup_path", "")
|
||||
data.setdefault("backup_interval", 0)
|
||||
data.setdefault("secret_mode_enabled", False)
|
||||
data.setdefault("clipboard_clear_delay", 45)
|
||||
data.setdefault("quick_unlock", False)
|
||||
data.setdefault("quick_unlock_enabled", data.get("quick_unlock", False))
|
||||
data.setdefault("nostr_max_retries", MAX_RETRIES)
|
||||
data.setdefault("nostr_retry_delay", float(RETRY_DELAY))
|
||||
data.setdefault("min_uppercase", 2)
|
||||
@@ -196,6 +198,19 @@ class ConfigManager:
|
||||
config = self.load_config(require_pin=False)
|
||||
return config.get("kdf_mode", "pbkdf2")
|
||||
|
||||
def set_argon2_time_cost(self, time_cost: int) -> None:
|
||||
"""Persist the Argon2 ``time_cost`` parameter."""
|
||||
if time_cost <= 0:
|
||||
raise ValueError("time_cost must be positive")
|
||||
config = self.load_config(require_pin=False)
|
||||
config["argon2_time_cost"] = int(time_cost)
|
||||
self.save_config(config)
|
||||
|
||||
def get_argon2_time_cost(self) -> int:
|
||||
"""Retrieve the Argon2 ``time_cost`` setting."""
|
||||
config = self.load_config(require_pin=False)
|
||||
return int(config.get("argon2_time_cost", 2))
|
||||
|
||||
def set_additional_backup_path(self, path: Optional[str]) -> None:
|
||||
"""Persist an optional additional backup path in the config."""
|
||||
config = self.load_config(require_pin=False)
|
||||
@@ -320,13 +335,13 @@ class ConfigManager:
|
||||
def set_quick_unlock(self, enabled: bool) -> None:
|
||||
"""Persist the quick unlock toggle."""
|
||||
cfg = self.load_config(require_pin=False)
|
||||
cfg["quick_unlock"] = bool(enabled)
|
||||
cfg["quick_unlock_enabled"] = bool(enabled)
|
||||
self.save_config(cfg)
|
||||
|
||||
def get_quick_unlock(self) -> bool:
|
||||
"""Retrieve whether quick unlock is enabled."""
|
||||
cfg = self.load_config(require_pin=False)
|
||||
return bool(cfg.get("quick_unlock", False))
|
||||
return bool(cfg.get("quick_unlock_enabled", False))
|
||||
|
||||
def set_nostr_max_retries(self, retries: int) -> None:
|
||||
"""Persist the maximum number of Nostr retry attempts."""
|
||||
|
@@ -1,7 +1,7 @@
|
||||
# /src/seedpass.core/encryption.py
|
||||
|
||||
import logging
|
||||
import traceback
|
||||
import unicodedata
|
||||
|
||||
try:
|
||||
import orjson as json_lib # type: ignore
|
||||
@@ -16,19 +16,41 @@ except Exception: # pragma: no cover - fallback for environments without orjson
|
||||
import hashlib
|
||||
import os
|
||||
import base64
|
||||
from dataclasses import asdict
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.exceptions import InvalidTag
|
||||
from cryptography.fernet import Fernet, InvalidToken
|
||||
from termcolor import colored
|
||||
from utils.file_lock import exclusive_lock
|
||||
from mnemonic import Mnemonic
|
||||
from utils.password_prompt import prompt_existing_password
|
||||
from utils.key_derivation import KdfConfig, CURRENT_KDF_VERSION
|
||||
|
||||
# Instantiate the logger
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _derive_legacy_key_from_password(password: str, iterations: int = 100_000) -> bytes:
|
||||
"""Derive legacy Fernet key using password only (no fingerprint)."""
|
||||
normalized = unicodedata.normalize("NFKD", password).strip().encode("utf-8")
|
||||
key = hashlib.pbkdf2_hmac("sha256", normalized, b"", iterations, dklen=32)
|
||||
return base64.urlsafe_b64encode(key)
|
||||
|
||||
|
||||
class LegacyFormatRequiresMigrationError(Exception):
|
||||
"""Raised when legacy-encrypted data needs user-guided migration."""
|
||||
|
||||
def __init__(self, context: Optional[str] = None) -> None:
|
||||
msg = (
|
||||
f"Legacy data detected for {context}" if context else "Legacy data detected"
|
||||
)
|
||||
super().__init__(msg)
|
||||
self.context = context
|
||||
|
||||
|
||||
class EncryptionManager:
|
||||
"""
|
||||
Manages encryption and decryption, handling migration from legacy Fernet
|
||||
@@ -66,6 +88,10 @@ class EncryptionManager:
|
||||
)
|
||||
raise
|
||||
|
||||
# Track user preference for handling legacy indexes
|
||||
self._legacy_migrate_flag = True
|
||||
self.last_migration_performed = False
|
||||
|
||||
def encrypt_data(self, data: bytes) -> bytes:
|
||||
"""
|
||||
(2) Encrypts data using the NEW AES-GCM format, prepending a version
|
||||
@@ -79,132 +105,284 @@ class EncryptionManager:
|
||||
logger.error(f"Failed to encrypt data: {e}", exc_info=True)
|
||||
raise
|
||||
|
||||
def decrypt_data(self, encrypted_data: bytes) -> bytes:
|
||||
"""
|
||||
(3) The core migration logic. Tries the new format first, then falls back
|
||||
to the old one. This is the ONLY place decryption logic should live.
|
||||
"""
|
||||
# Try the new V2 format first
|
||||
if encrypted_data.startswith(b"V2:"):
|
||||
try:
|
||||
nonce = encrypted_data[3:15]
|
||||
ciphertext = encrypted_data[15:]
|
||||
if len(ciphertext) < 16:
|
||||
logger.error("AES-GCM payload too short")
|
||||
raise InvalidToken("AES-GCM payload too short")
|
||||
return self.cipher.decrypt(nonce, ciphertext, None)
|
||||
except InvalidTag as e:
|
||||
logger.error("AES-GCM decryption failed: Invalid authentication tag.")
|
||||
try:
|
||||
result = self.fernet.decrypt(encrypted_data[3:])
|
||||
logger.warning(
|
||||
"Legacy-format file had incorrect 'V2:' header; decrypted with Fernet"
|
||||
)
|
||||
return result
|
||||
except InvalidToken:
|
||||
raise InvalidToken("AES-GCM decryption failed.") from e
|
||||
def decrypt_data(
|
||||
self, encrypted_data: bytes, context: Optional[str] = None
|
||||
) -> bytes:
|
||||
"""Decrypt ``encrypted_data`` handling legacy fallbacks.
|
||||
|
||||
# If it's not V2, it must be the legacy Fernet format
|
||||
else:
|
||||
logger.warning("Data is in legacy Fernet format. Attempting migration.")
|
||||
Parameters
|
||||
----------
|
||||
encrypted_data:
|
||||
The bytes to decrypt.
|
||||
context:
|
||||
Optional string describing what is being decrypted ("seed", "index", etc.)
|
||||
for clearer error messages.
|
||||
"""
|
||||
|
||||
ctx = f" {context}" if context else ""
|
||||
|
||||
try:
|
||||
# Try the new V2 format first
|
||||
if encrypted_data.startswith(b"V2:"):
|
||||
try:
|
||||
nonce = encrypted_data[3:15]
|
||||
ciphertext = encrypted_data[15:]
|
||||
if len(ciphertext) < 16:
|
||||
logger.error("AES-GCM payload too short")
|
||||
raise InvalidToken("AES-GCM payload too short")
|
||||
return self.cipher.decrypt(nonce, ciphertext, None)
|
||||
except InvalidTag as e:
|
||||
logger.debug(
|
||||
"AES-GCM decryption failed: Invalid authentication tag."
|
||||
)
|
||||
try:
|
||||
result = self.fernet.decrypt(encrypted_data[3:])
|
||||
logger.warning(
|
||||
"Legacy-format file had incorrect 'V2:' header; decrypted with Fernet"
|
||||
)
|
||||
return result
|
||||
except InvalidToken:
|
||||
msg = f"Failed to decrypt{ctx}: invalid key or corrupt file"
|
||||
logger.error(msg)
|
||||
raise InvalidToken(msg) from e
|
||||
|
||||
# If it's not V2, it must be the legacy Fernet format
|
||||
else:
|
||||
logger.warning("Data is in legacy Fernet format. Attempting migration.")
|
||||
try:
|
||||
return self.fernet.decrypt(encrypted_data)
|
||||
except InvalidToken as e:
|
||||
logger.error(
|
||||
"Legacy Fernet decryption failed. Vault may be corrupt or key is incorrect."
|
||||
)
|
||||
raise e
|
||||
|
||||
except (InvalidToken, InvalidTag) as e:
|
||||
if encrypted_data.startswith(b"V2:"):
|
||||
# Already determined not to be legacy; re-raise
|
||||
raise
|
||||
if isinstance(e, InvalidToken) and str(e) == "AES-GCM payload too short":
|
||||
raise
|
||||
if not self._legacy_migrate_flag:
|
||||
raise
|
||||
logger.debug(f"Could not decrypt data{ctx}: {e}")
|
||||
raise LegacyFormatRequiresMigrationError(context)
|
||||
|
||||
def decrypt_legacy(
|
||||
self, encrypted_data: bytes, password: str, context: Optional[str] = None
|
||||
) -> bytes:
|
||||
"""Decrypt ``encrypted_data`` using legacy password-only key derivation."""
|
||||
|
||||
ctx = f" {context}" if context else ""
|
||||
last_exc: Optional[Exception] = None
|
||||
for iter_count in [50_000, 100_000]:
|
||||
try:
|
||||
return self.fernet.decrypt(encrypted_data)
|
||||
except InvalidToken as e:
|
||||
logger.error(
|
||||
"Legacy Fernet decryption failed. Vault may be corrupt or key is incorrect."
|
||||
legacy_key = _derive_legacy_key_from_password(
|
||||
password, iterations=iter_count
|
||||
)
|
||||
raise InvalidToken(
|
||||
"Could not decrypt data with any available method."
|
||||
) from e
|
||||
legacy_mgr = EncryptionManager(legacy_key, self.fingerprint_dir)
|
||||
legacy_mgr._legacy_migrate_flag = False
|
||||
result = legacy_mgr.decrypt_data(encrypted_data, context=context)
|
||||
try: # record iteration count for future runs
|
||||
from .vault import Vault
|
||||
from .config_manager import ConfigManager
|
||||
|
||||
cfg_mgr = ConfigManager(
|
||||
Vault(self, self.fingerprint_dir), self.fingerprint_dir
|
||||
)
|
||||
cfg_mgr.set_kdf_iterations(iter_count)
|
||||
except Exception: # pragma: no cover - best effort
|
||||
logger.error(
|
||||
"Failed to record PBKDF2 iteration count in config",
|
||||
exc_info=True,
|
||||
)
|
||||
logger.warning(
|
||||
"Data decrypted using legacy password-only key derivation."
|
||||
)
|
||||
return result
|
||||
except Exception as e2: # pragma: no cover - try next iteration
|
||||
last_exc = e2
|
||||
logger.error(f"Failed legacy decryption attempt: {last_exc}", exc_info=True)
|
||||
raise InvalidToken(
|
||||
f"Could not decrypt{ctx} with any available method."
|
||||
) from last_exc
|
||||
|
||||
# --- All functions below this point now use the smart `decrypt_data` method ---
|
||||
|
||||
def encrypt_parent_seed(self, parent_seed: str) -> None:
|
||||
def resolve_relative_path(self, relative_path: Path) -> Path:
|
||||
"""Resolve ``relative_path`` within ``fingerprint_dir`` and validate it.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
relative_path:
|
||||
The user-supplied path relative to ``fingerprint_dir``.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Path
|
||||
The normalized absolute path inside ``fingerprint_dir``.
|
||||
|
||||
Raises
|
||||
------
|
||||
ValueError
|
||||
If the resulting path is absolute or escapes ``fingerprint_dir``.
|
||||
"""
|
||||
|
||||
candidate = (self.fingerprint_dir / relative_path).resolve()
|
||||
if not candidate.is_relative_to(self.fingerprint_dir.resolve()):
|
||||
raise ValueError("Invalid path outside fingerprint directory")
|
||||
return candidate
|
||||
|
||||
def encrypt_parent_seed(
|
||||
self, parent_seed: str, kdf: Optional[KdfConfig] = None
|
||||
) -> None:
|
||||
"""Encrypts and saves the parent seed to 'parent_seed.enc'."""
|
||||
data = parent_seed.encode("utf-8")
|
||||
encrypted_data = self.encrypt_data(data) # This now creates V2 format
|
||||
with exclusive_lock(self.parent_seed_file) as fh:
|
||||
fh.seek(0)
|
||||
fh.truncate()
|
||||
fh.write(encrypted_data)
|
||||
os.chmod(self.parent_seed_file, 0o600)
|
||||
self.encrypt_and_save_file(data, self.parent_seed_file, kdf=kdf)
|
||||
logger.info(f"Parent seed encrypted and saved to '{self.parent_seed_file}'.")
|
||||
|
||||
def decrypt_parent_seed(self) -> str:
|
||||
"""Decrypts and returns the parent seed, handling migration."""
|
||||
with exclusive_lock(self.parent_seed_file) as fh:
|
||||
fh.seek(0)
|
||||
encrypted_data = fh.read()
|
||||
blob = fh.read()
|
||||
|
||||
kdf, encrypted_data = self._deserialize(blob)
|
||||
is_legacy = not encrypted_data.startswith(b"V2:")
|
||||
decrypted_data = self.decrypt_data(encrypted_data)
|
||||
decrypted_data = self.decrypt_data(encrypted_data, context="seed")
|
||||
|
||||
if is_legacy:
|
||||
logger.info("Parent seed was in legacy format. Re-encrypting to V2 format.")
|
||||
self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip())
|
||||
self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip(), kdf=kdf)
|
||||
|
||||
return decrypted_data.decode("utf-8").strip()
|
||||
|
||||
def encrypt_and_save_file(self, data: bytes, relative_path: Path) -> None:
|
||||
file_path = self.fingerprint_dir / relative_path
|
||||
def _serialize(self, kdf: KdfConfig, ciphertext: bytes) -> bytes:
|
||||
payload = {"kdf": asdict(kdf), "ct": base64.b64encode(ciphertext).decode()}
|
||||
if USE_ORJSON:
|
||||
return json_lib.dumps(payload)
|
||||
return json_lib.dumps(payload, separators=(",", ":")).encode("utf-8")
|
||||
|
||||
def _deserialize(self, blob: bytes) -> Tuple[KdfConfig, bytes]:
|
||||
"""Return ``(KdfConfig, ciphertext)`` from serialized *blob*.
|
||||
|
||||
Legacy files stored the raw ciphertext without a JSON wrapper. If
|
||||
decoding the wrapper fails, treat ``blob`` as the ciphertext and return
|
||||
a default HKDF configuration.
|
||||
"""
|
||||
|
||||
try:
|
||||
if USE_ORJSON:
|
||||
obj = json_lib.loads(blob)
|
||||
else:
|
||||
obj = json_lib.loads(blob.decode("utf-8"))
|
||||
kdf = KdfConfig(**obj.get("kdf", {}))
|
||||
ct_b64 = obj.get("ct", "")
|
||||
ciphertext = base64.b64decode(ct_b64)
|
||||
if ciphertext:
|
||||
return kdf, ciphertext
|
||||
except Exception: # pragma: no cover - fall back to legacy path
|
||||
pass
|
||||
|
||||
# Legacy format: ``blob`` already contains the ciphertext
|
||||
return (
|
||||
KdfConfig(name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""),
|
||||
blob,
|
||||
)
|
||||
|
||||
def encrypt_and_save_file(
|
||||
self, data: bytes, relative_path: Path, *, kdf: Optional[KdfConfig] = None
|
||||
) -> None:
|
||||
if kdf is None:
|
||||
kdf = KdfConfig()
|
||||
file_path = self.resolve_relative_path(relative_path)
|
||||
file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
encrypted_data = self.encrypt_data(data)
|
||||
payload = self._serialize(kdf, encrypted_data)
|
||||
with exclusive_lock(file_path) as fh:
|
||||
fh.seek(0)
|
||||
fh.truncate()
|
||||
fh.write(encrypted_data)
|
||||
fh.write(payload)
|
||||
fh.flush()
|
||||
os.fsync(fh.fileno())
|
||||
os.chmod(file_path, 0o600)
|
||||
|
||||
def decrypt_file(self, relative_path: Path) -> bytes:
|
||||
file_path = self.fingerprint_dir / relative_path
|
||||
file_path = self.resolve_relative_path(relative_path)
|
||||
with exclusive_lock(file_path) as fh:
|
||||
fh.seek(0)
|
||||
encrypted_data = fh.read()
|
||||
return self.decrypt_data(encrypted_data)
|
||||
blob = fh.read()
|
||||
_, encrypted_data = self._deserialize(blob)
|
||||
return self.decrypt_data(encrypted_data, context=str(relative_path))
|
||||
|
||||
def save_json_data(self, data: dict, relative_path: Optional[Path] = None) -> None:
|
||||
def get_file_kdf(self, relative_path: Path) -> KdfConfig:
|
||||
file_path = self.resolve_relative_path(relative_path)
|
||||
with exclusive_lock(file_path) as fh:
|
||||
fh.seek(0)
|
||||
blob = fh.read()
|
||||
kdf, _ = self._deserialize(blob)
|
||||
return kdf
|
||||
|
||||
def save_json_data(
|
||||
self,
|
||||
data: dict,
|
||||
relative_path: Optional[Path] = None,
|
||||
*,
|
||||
kdf: Optional[KdfConfig] = None,
|
||||
) -> None:
|
||||
if relative_path is None:
|
||||
relative_path = Path("seedpass_entries_db.json.enc")
|
||||
if USE_ORJSON:
|
||||
json_data = json_lib.dumps(data)
|
||||
else:
|
||||
json_data = json_lib.dumps(data, separators=(",", ":")).encode("utf-8")
|
||||
self.encrypt_and_save_file(json_data, relative_path)
|
||||
self.encrypt_and_save_file(json_data, relative_path, kdf=kdf)
|
||||
logger.debug(f"JSON data encrypted and saved to '{relative_path}'.")
|
||||
|
||||
def load_json_data(self, relative_path: Optional[Path] = None) -> dict:
|
||||
def load_json_data(
|
||||
self, relative_path: Optional[Path] = None, *, return_kdf: bool = False
|
||||
) -> dict | Tuple[dict, KdfConfig]:
|
||||
"""
|
||||
Loads and decrypts JSON data, automatically migrating and re-saving
|
||||
if it's in the legacy format.
|
||||
"""
|
||||
if relative_path is None:
|
||||
relative_path = Path("seedpass_entries_db.json.enc")
|
||||
|
||||
file_path = self.fingerprint_dir / relative_path
|
||||
file_path = self.resolve_relative_path(relative_path)
|
||||
if not file_path.exists():
|
||||
return {"entries": {}}
|
||||
empty: dict = {"entries": {}}
|
||||
if return_kdf:
|
||||
return empty, KdfConfig(
|
||||
name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""
|
||||
)
|
||||
return empty
|
||||
|
||||
with exclusive_lock(file_path) as fh:
|
||||
fh.seek(0)
|
||||
encrypted_data = fh.read()
|
||||
blob = fh.read()
|
||||
|
||||
kdf, encrypted_data = self._deserialize(blob)
|
||||
is_legacy = not encrypted_data.startswith(b"V2:")
|
||||
self.last_migration_performed = False
|
||||
|
||||
try:
|
||||
decrypted_data = self.decrypt_data(encrypted_data)
|
||||
decrypted_data = self.decrypt_data(
|
||||
encrypted_data, context=str(relative_path)
|
||||
)
|
||||
if USE_ORJSON:
|
||||
data = json_lib.loads(decrypted_data)
|
||||
else:
|
||||
data = json_lib.loads(decrypted_data.decode("utf-8"))
|
||||
|
||||
# If it was a legacy file, re-save it in the new format now
|
||||
if is_legacy:
|
||||
if is_legacy and self._legacy_migrate_flag:
|
||||
logger.info(f"Migrating and re-saving legacy vault file: {file_path}")
|
||||
self.save_json_data(data, relative_path)
|
||||
self.save_json_data(data, relative_path, kdf=kdf)
|
||||
self.update_checksum(relative_path)
|
||||
self.last_migration_performed = True
|
||||
|
||||
if return_kdf:
|
||||
return data, kdf
|
||||
return data
|
||||
except (InvalidToken, InvalidTag, JSONDecodeError) as e:
|
||||
logger.error(
|
||||
@@ -215,7 +393,7 @@ class EncryptionManager:
|
||||
|
||||
def get_encrypted_index(self) -> Optional[bytes]:
|
||||
relative_path = Path("seedpass_entries_db.json.enc")
|
||||
file_path = self.fingerprint_dir / relative_path
|
||||
file_path = self.resolve_relative_path(relative_path)
|
||||
if not file_path.exists():
|
||||
return None
|
||||
with exclusive_lock(file_path) as fh:
|
||||
@@ -244,13 +422,18 @@ class EncryptionManager:
|
||||
"""
|
||||
if relative_path is None:
|
||||
relative_path = Path("seedpass_entries_db.json.enc")
|
||||
try:
|
||||
decrypted_data = self.decrypt_data(encrypted_data)
|
||||
|
||||
kdf, ciphertext = self._deserialize(encrypted_data)
|
||||
is_legacy = not ciphertext.startswith(b"V2:")
|
||||
self.last_migration_performed = False
|
||||
|
||||
def _process(decrypted: bytes) -> dict:
|
||||
if USE_ORJSON:
|
||||
data = json_lib.loads(decrypted_data)
|
||||
data = json_lib.loads(decrypted)
|
||||
else:
|
||||
data = json_lib.loads(decrypted_data.decode("utf-8"))
|
||||
if merge and (self.fingerprint_dir / relative_path).exists():
|
||||
data = json_lib.loads(decrypted.decode("utf-8"))
|
||||
existing_file = self.resolve_relative_path(relative_path)
|
||||
if merge and existing_file.exists():
|
||||
current = self.load_json_data(relative_path)
|
||||
current_entries = current.get("entries", {})
|
||||
for idx, entry in data.get("entries", {}).items():
|
||||
@@ -264,11 +447,53 @@ class EncryptionManager:
|
||||
current.get("schema_version", 0), data.get("schema_version", 0)
|
||||
)
|
||||
data = current
|
||||
self.save_json_data(data, relative_path) # This always saves in V2 format
|
||||
return data
|
||||
|
||||
try:
|
||||
decrypted_data = self.decrypt_data(ciphertext, context=str(relative_path))
|
||||
data = _process(decrypted_data)
|
||||
self.save_json_data(data, relative_path, kdf=kdf)
|
||||
self.update_checksum(relative_path)
|
||||
logger.info("Index file from Nostr was processed and saved successfully.")
|
||||
print(colored("Index file updated from Nostr successfully.", "green"))
|
||||
self.last_migration_performed = is_legacy
|
||||
return True
|
||||
except (InvalidToken, LegacyFormatRequiresMigrationError):
|
||||
try:
|
||||
password = prompt_existing_password(
|
||||
"Enter your master password for legacy decryption: "
|
||||
)
|
||||
decrypted_data = self.decrypt_legacy(
|
||||
ciphertext, password, context=str(relative_path)
|
||||
)
|
||||
data = _process(decrypted_data)
|
||||
self.save_json_data(data, relative_path, kdf=kdf)
|
||||
self.update_checksum(relative_path)
|
||||
logger.warning(
|
||||
"Index decrypted using legacy password-only key derivation."
|
||||
)
|
||||
print(
|
||||
colored(
|
||||
"Warning: index decrypted with legacy key; it will be re-encrypted.",
|
||||
"yellow",
|
||||
)
|
||||
)
|
||||
self.last_migration_performed = True
|
||||
return True
|
||||
except Exception as e2:
|
||||
if strict:
|
||||
logger.error(
|
||||
f"Failed legacy decryption attempt: {e2}",
|
||||
exc_info=True,
|
||||
)
|
||||
print(
|
||||
colored(
|
||||
f"Error: Failed to decrypt and save data from Nostr: {e2}",
|
||||
"red",
|
||||
)
|
||||
)
|
||||
raise
|
||||
logger.warning(f"Failed to decrypt index from Nostr: {e2}")
|
||||
return False
|
||||
except Exception as e: # pragma: no cover - error handling
|
||||
if strict:
|
||||
logger.error(
|
||||
@@ -289,8 +514,7 @@ class EncryptionManager:
|
||||
"""Updates the checksum file for the specified file."""
|
||||
if relative_path is None:
|
||||
relative_path = Path("seedpass_entries_db.json.enc")
|
||||
|
||||
file_path = self.fingerprint_dir / relative_path
|
||||
file_path = self.resolve_relative_path(relative_path)
|
||||
if not file_path.exists():
|
||||
return
|
||||
|
||||
@@ -299,7 +523,22 @@ class EncryptionManager:
|
||||
fh.seek(0)
|
||||
encrypted_bytes = fh.read()
|
||||
checksum = hashlib.sha256(encrypted_bytes).hexdigest()
|
||||
checksum_file = file_path.parent / f"{file_path.stem}_checksum.txt"
|
||||
|
||||
# Build checksum path by stripping both `.json` and `.enc`
|
||||
checksum_base = file_path.with_suffix("").with_suffix("")
|
||||
checksum_file = checksum_base.parent / f"{checksum_base.name}_checksum.txt"
|
||||
|
||||
# Remove legacy checksum file if present
|
||||
legacy_checksum = file_path.parent / f"{file_path.stem}_checksum.txt"
|
||||
if legacy_checksum != checksum_file and legacy_checksum.exists():
|
||||
try:
|
||||
legacy_checksum.unlink()
|
||||
except Exception:
|
||||
logger.warning(
|
||||
f"Could not remove legacy checksum file '{legacy_checksum}'",
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
with exclusive_lock(checksum_file) as fh:
|
||||
fh.seek(0)
|
||||
fh.truncate()
|
||||
@@ -314,25 +553,21 @@ class EncryptionManager:
|
||||
)
|
||||
raise
|
||||
|
||||
# ... validate_seed and derive_seed_from_mnemonic can remain the same ...
|
||||
def validate_seed(self, seed_phrase: str) -> bool:
|
||||
def validate_seed(self, seed_phrase: str) -> tuple[bool, Optional[str]]:
|
||||
"""Validate a BIP-39 mnemonic.
|
||||
|
||||
Returns a tuple of ``(is_valid, error_message)`` where ``error_message``
|
||||
is ``None`` when the mnemonic is valid.
|
||||
"""
|
||||
try:
|
||||
words = seed_phrase.split()
|
||||
if len(words) != 12:
|
||||
logger.error("Seed phrase does not contain exactly 12 words.")
|
||||
print(
|
||||
colored(
|
||||
"Error: Seed phrase must contain exactly 12 words.",
|
||||
"red",
|
||||
)
|
||||
)
|
||||
return False
|
||||
logger.debug("Seed phrase validated successfully.")
|
||||
return True
|
||||
if Mnemonic("english").check(seed_phrase):
|
||||
logger.debug("Seed phrase validated successfully.")
|
||||
return True, None
|
||||
logger.error("Seed phrase failed BIP-39 validation.")
|
||||
return False, "Invalid seed phrase."
|
||||
except Exception as e:
|
||||
logging.error(f"Error validating seed phrase: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to validate seed phrase: {e}", "red"))
|
||||
return False
|
||||
logger.error(f"Error validating seed phrase: {e}", exc_info=True)
|
||||
return False, f"Failed to validate seed phrase: {e}"
|
||||
|
||||
def derive_seed_from_mnemonic(self, mnemonic: str, passphrase: str = "") -> bytes:
|
||||
try:
|
||||
|
@@ -33,10 +33,11 @@ from pathlib import Path
|
||||
|
||||
from termcolor import colored
|
||||
from .migrations import LATEST_VERSION
|
||||
from .entry_types import EntryType
|
||||
from .entry_types import EntryType, ALL_ENTRY_TYPES
|
||||
from .totp import TotpManager
|
||||
from utils.fingerprint import generate_fingerprint
|
||||
from utils.checksum import canonical_json_dumps
|
||||
from utils.atomic_write import atomic_write
|
||||
from utils.key_validation import (
|
||||
validate_totp_secret,
|
||||
validate_ssh_key_pair,
|
||||
@@ -221,7 +222,9 @@ class EntryManager:
|
||||
|
||||
data["entries"][str(index)] = entry
|
||||
|
||||
logger.debug(f"Added entry at index {index}: {data['entries'][str(index)]}")
|
||||
logger.debug(
|
||||
f"Added entry at index {index} with label '{entry.get('label', '')}'."
|
||||
)
|
||||
|
||||
self._save_index(data)
|
||||
self.update_checksum()
|
||||
@@ -254,7 +257,7 @@ class EntryManager:
|
||||
def add_totp(
|
||||
self,
|
||||
label: str,
|
||||
parent_seed: str,
|
||||
parent_seed: str | bytes,
|
||||
*,
|
||||
archived: bool = False,
|
||||
secret: str | None = None,
|
||||
@@ -458,7 +461,7 @@ class EntryManager:
|
||||
|
||||
seed_bytes = Bip39SeedGenerator(parent_seed).Generate()
|
||||
bip85 = BIP85(seed_bytes)
|
||||
entropy = bip85.derive_entropy(index=index, bytes_len=32)
|
||||
entropy = bip85.derive_entropy(index=index, entropy_bytes=32)
|
||||
keys = Keys(priv_k=entropy.hex())
|
||||
npub = Keys.hex_to_bech32(keys.public_key_hex(), "npub")
|
||||
nsec = Keys.hex_to_bech32(keys.private_key_hex(), "nsec")
|
||||
@@ -536,7 +539,7 @@ class EntryManager:
|
||||
bip85 = BIP85(seed_bytes)
|
||||
|
||||
key_idx = int(entry.get("index", index))
|
||||
entropy = bip85.derive_entropy(index=key_idx, bytes_len=32)
|
||||
entropy = bip85.derive_entropy(index=key_idx, entropy_bytes=32)
|
||||
keys = Keys(priv_k=entropy.hex())
|
||||
npub = Keys.hex_to_bech32(keys.public_key_hex(), "npub")
|
||||
nsec = Keys.hex_to_bech32(keys.private_key_hex(), "nsec")
|
||||
@@ -686,7 +689,10 @@ class EntryManager:
|
||||
return derive_seed_phrase(bip85, seed_index, words)
|
||||
|
||||
def get_totp_code(
|
||||
self, index: int, parent_seed: str | None = None, timestamp: int | None = None
|
||||
self,
|
||||
index: int,
|
||||
parent_seed: str | bytes | None = None,
|
||||
timestamp: int | None = None,
|
||||
) -> str:
|
||||
"""Return the current TOTP code for the specified entry."""
|
||||
entry = self.retrieve_entry(index)
|
||||
@@ -716,7 +722,9 @@ class EntryManager:
|
||||
period = int(entry.get("period", 30))
|
||||
return TotpManager.time_remaining(period)
|
||||
|
||||
def export_totp_entries(self, parent_seed: str) -> dict[str, list[dict[str, Any]]]:
|
||||
def export_totp_entries(
|
||||
self, parent_seed: str | bytes
|
||||
) -> dict[str, list[dict[str, Any]]]:
|
||||
"""Return all TOTP secrets and metadata for external use."""
|
||||
data = self._load_index()
|
||||
entries = data.get("entries", {})
|
||||
@@ -779,7 +787,9 @@ class EntryManager:
|
||||
EntryType.MANAGED_ACCOUNT.value,
|
||||
):
|
||||
entry.setdefault("custom_fields", [])
|
||||
logger.debug(f"Retrieved entry at index {index}: {entry}")
|
||||
logger.debug(
|
||||
f"Retrieved entry at index {index} with label '{entry.get('label', '')}'."
|
||||
)
|
||||
clean = {k: v for k, v in entry.items() if k != "modified_ts"}
|
||||
return clean
|
||||
else:
|
||||
@@ -1009,13 +1019,11 @@ class EntryManager:
|
||||
|
||||
if custom_fields is not None:
|
||||
entry["custom_fields"] = custom_fields
|
||||
logger.debug(
|
||||
f"Updated custom fields for index {index}: {custom_fields}"
|
||||
)
|
||||
logger.debug(f"Updated custom fields for index {index}.")
|
||||
|
||||
if tags is not None:
|
||||
entry["tags"] = tags
|
||||
logger.debug(f"Updated tags for index {index}: {tags}")
|
||||
logger.debug(f"Updated tags for index {index}.")
|
||||
|
||||
policy_updates: dict[str, Any] = {}
|
||||
if include_special_chars is not None:
|
||||
@@ -1042,7 +1050,9 @@ class EntryManager:
|
||||
entry["modified_ts"] = int(time.time())
|
||||
|
||||
data["entries"][str(index)] = entry
|
||||
logger.debug(f"Modified entry at index {index}: {entry}")
|
||||
logger.debug(
|
||||
f"Modified entry at index {index} with label '{entry.get('label', '')}'."
|
||||
)
|
||||
|
||||
self._save_index(data)
|
||||
self.update_checksum()
|
||||
@@ -1071,7 +1081,7 @@ class EntryManager:
|
||||
def list_entries(
|
||||
self,
|
||||
sort_by: str = "index",
|
||||
filter_kind: str | None = None,
|
||||
filter_kinds: list[str] | None = None,
|
||||
*,
|
||||
include_archived: bool = False,
|
||||
verbose: bool = True,
|
||||
@@ -1083,8 +1093,9 @@ class EntryManager:
|
||||
sort_by:
|
||||
Field to sort by. Supported values are ``"index"``, ``"label"`` and
|
||||
``"updated"``.
|
||||
filter_kind:
|
||||
Optional entry kind to restrict the results.
|
||||
filter_kinds:
|
||||
Optional list of entry kinds to restrict the results. Defaults to
|
||||
``ALL_ENTRY_TYPES``.
|
||||
|
||||
Archived entries are omitted unless ``include_archived`` is ``True``.
|
||||
"""
|
||||
@@ -1113,12 +1124,14 @@ class EntryManager:
|
||||
|
||||
sorted_items = sorted(entries_data.items(), key=sort_key)
|
||||
|
||||
if filter_kinds is None:
|
||||
filter_kinds = ALL_ENTRY_TYPES
|
||||
|
||||
filtered_items: List[Tuple[int, Dict[str, Any]]] = []
|
||||
for idx_str, entry in sorted_items:
|
||||
if (
|
||||
filter_kind is not None
|
||||
and entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
|
||||
!= filter_kind
|
||||
entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
|
||||
not in filter_kinds
|
||||
):
|
||||
continue
|
||||
if not include_archived and entry.get(
|
||||
@@ -1312,8 +1325,7 @@ class EntryManager:
|
||||
# The checksum file path already includes the fingerprint directory
|
||||
checksum_path = self.checksum_file
|
||||
|
||||
with open(checksum_path, "w") as f:
|
||||
f.write(checksum)
|
||||
atomic_write(checksum_path, lambda f: f.write(checksum))
|
||||
|
||||
logger.debug(f"Checksum updated and written to '{checksum_path}'.")
|
||||
print(colored(f"[+] Checksum updated successfully.", "green"))
|
||||
@@ -1367,7 +1379,7 @@ class EntryManager:
|
||||
def list_all_entries(
|
||||
self,
|
||||
sort_by: str = "index",
|
||||
filter_kind: str | None = None,
|
||||
filter_kinds: list[str] | None = None,
|
||||
*,
|
||||
include_archived: bool = False,
|
||||
) -> None:
|
||||
@@ -1375,7 +1387,7 @@ class EntryManager:
|
||||
try:
|
||||
entries = self.list_entries(
|
||||
sort_by=sort_by,
|
||||
filter_kind=filter_kind,
|
||||
filter_kinds=filter_kinds,
|
||||
include_archived=include_archived,
|
||||
)
|
||||
if not entries:
|
||||
@@ -1399,7 +1411,7 @@ class EntryManager:
|
||||
|
||||
def get_entry_summaries(
|
||||
self,
|
||||
filter_kind: str | None = None,
|
||||
filter_kinds: list[str] | None = None,
|
||||
*,
|
||||
include_archived: bool = False,
|
||||
) -> list[tuple[int, str, str]]:
|
||||
@@ -1408,10 +1420,13 @@ class EntryManager:
|
||||
data = self._load_index()
|
||||
entries_data = data.get("entries", {})
|
||||
|
||||
if filter_kinds is None:
|
||||
filter_kinds = ALL_ENTRY_TYPES
|
||||
|
||||
summaries: list[tuple[int, str, str]] = []
|
||||
for idx_str, entry in entries_data.items():
|
||||
etype = entry.get("type", entry.get("kind", EntryType.PASSWORD.value))
|
||||
if filter_kind and etype != filter_kind:
|
||||
if etype not in filter_kinds:
|
||||
continue
|
||||
if not include_archived and entry.get(
|
||||
"archived", entry.get("blacklisted", False)
|
||||
|
233
src/seedpass/core/entry_service.py
Normal file
233
src/seedpass/core/entry_service.py
Normal file
@@ -0,0 +1,233 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from termcolor import colored
|
||||
|
||||
from constants import (
|
||||
DEFAULT_PASSWORD_LENGTH,
|
||||
MAX_PASSWORD_LENGTH,
|
||||
MIN_PASSWORD_LENGTH,
|
||||
)
|
||||
import seedpass.core.manager as manager_module
|
||||
from utils.terminal_utils import clear_header_with_notification, pause
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover - typing only
|
||||
from .manager import PasswordManager
|
||||
|
||||
|
||||
class EntryService:
|
||||
"""Entry management operations for :class:`PasswordManager`."""
|
||||
|
||||
def __init__(self, manager: PasswordManager) -> None:
|
||||
self.manager = manager
|
||||
|
||||
def handle_add_password(self) -> None:
|
||||
pm = self.manager
|
||||
try:
|
||||
fp, parent_fp, child_fp = pm.header_fingerprint_args
|
||||
clear_header_with_notification(
|
||||
pm,
|
||||
fp,
|
||||
"Main Menu > Add Entry > Password",
|
||||
parent_fingerprint=parent_fp,
|
||||
child_fingerprint=child_fp,
|
||||
)
|
||||
|
||||
def prompt_length() -> int | None:
|
||||
length_input = input(
|
||||
f"Enter desired password length (default {DEFAULT_PASSWORD_LENGTH}): "
|
||||
).strip()
|
||||
length = DEFAULT_PASSWORD_LENGTH
|
||||
if length_input:
|
||||
if not length_input.isdigit():
|
||||
print(
|
||||
colored("Error: Password length must be a number.", "red")
|
||||
)
|
||||
return None
|
||||
length = int(length_input)
|
||||
if not (MIN_PASSWORD_LENGTH <= length <= MAX_PASSWORD_LENGTH):
|
||||
print(
|
||||
colored(
|
||||
f"Error: Password length must be between {MIN_PASSWORD_LENGTH} and {MAX_PASSWORD_LENGTH}.",
|
||||
"red",
|
||||
)
|
||||
)
|
||||
return None
|
||||
return length
|
||||
|
||||
def finalize_entry(index: int, label: str, length: int) -> None:
|
||||
pm.is_dirty = True
|
||||
pm.last_update = time.time()
|
||||
|
||||
entry = pm.entry_manager.retrieve_entry(index)
|
||||
password = pm._generate_password_for_entry(entry, index, length)
|
||||
|
||||
print(
|
||||
colored(
|
||||
f"\n[+] Password generated and indexed with ID {index}.\n",
|
||||
"green",
|
||||
)
|
||||
)
|
||||
if pm.secret_mode_enabled:
|
||||
if manager_module.copy_to_clipboard(
|
||||
password, pm.clipboard_clear_delay
|
||||
):
|
||||
print(
|
||||
colored(
|
||||
f"[+] Password copied to clipboard. Will clear in {pm.clipboard_clear_delay} seconds.",
|
||||
"green",
|
||||
)
|
||||
)
|
||||
else:
|
||||
print(colored(f"Password for {label}: {password}\n", "yellow"))
|
||||
|
||||
try:
|
||||
pm.start_background_vault_sync()
|
||||
logging.info(
|
||||
"Encrypted index posted to Nostr after entry addition."
|
||||
)
|
||||
except Exception as nostr_error: # pragma: no cover - best effort
|
||||
logging.error(
|
||||
f"Failed to post updated index to Nostr: {nostr_error}",
|
||||
exc_info=True,
|
||||
)
|
||||
pause()
|
||||
|
||||
mode = input("Choose mode: [Q]uick or [A]dvanced? ").strip().lower()
|
||||
|
||||
website_name = input("Enter the label or website name: ").strip()
|
||||
if not website_name:
|
||||
print(colored("Error: Label cannot be empty.", "red"))
|
||||
return
|
||||
|
||||
username = input("Enter the username (optional): ").strip()
|
||||
url = input("Enter the URL (optional): ").strip()
|
||||
|
||||
if mode.startswith("q"):
|
||||
length = prompt_length()
|
||||
if length is None:
|
||||
return
|
||||
include_special_input = (
|
||||
input("Include special characters? (Y/n): ").strip().lower()
|
||||
)
|
||||
include_special_chars: bool | None = None
|
||||
if include_special_input:
|
||||
include_special_chars = include_special_input != "n"
|
||||
|
||||
index = pm.entry_manager.add_entry(
|
||||
website_name,
|
||||
length,
|
||||
username,
|
||||
url,
|
||||
include_special_chars=include_special_chars,
|
||||
)
|
||||
|
||||
finalize_entry(index, website_name, length)
|
||||
return
|
||||
|
||||
notes = input("Enter notes (optional): ").strip()
|
||||
tags_input = input("Enter tags (comma-separated, optional): ").strip()
|
||||
tags = (
|
||||
[t.strip() for t in tags_input.split(",") if t.strip()]
|
||||
if tags_input
|
||||
else []
|
||||
)
|
||||
|
||||
custom_fields: list[dict[str, object]] = []
|
||||
while True:
|
||||
add_field = input("Add custom field? (y/N): ").strip().lower()
|
||||
if add_field != "y":
|
||||
break
|
||||
label = input(" Field label: ").strip()
|
||||
value = input(" Field value: ").strip()
|
||||
hidden = input(" Hidden field? (y/N): ").strip().lower() == "y"
|
||||
custom_fields.append(
|
||||
{"label": label, "value": value, "is_hidden": hidden}
|
||||
)
|
||||
|
||||
length = prompt_length()
|
||||
if length is None:
|
||||
return
|
||||
|
||||
include_special_input = (
|
||||
input("Include special characters? (Y/n): ").strip().lower()
|
||||
)
|
||||
include_special_chars: bool | None = None
|
||||
if include_special_input:
|
||||
include_special_chars = include_special_input != "n"
|
||||
|
||||
allowed_special_chars = input(
|
||||
"Allowed special characters (leave blank for default): "
|
||||
).strip()
|
||||
if not allowed_special_chars:
|
||||
allowed_special_chars = None
|
||||
|
||||
special_mode = input("Special character mode (safe/leave blank): ").strip()
|
||||
if not special_mode:
|
||||
special_mode = None
|
||||
|
||||
exclude_ambiguous_input = (
|
||||
input("Exclude ambiguous characters? (y/N): ").strip().lower()
|
||||
)
|
||||
exclude_ambiguous: bool | None = None
|
||||
if exclude_ambiguous_input:
|
||||
exclude_ambiguous = exclude_ambiguous_input == "y"
|
||||
|
||||
min_uppercase_input = input(
|
||||
"Minimum uppercase letters (blank for default): "
|
||||
).strip()
|
||||
if min_uppercase_input and not min_uppercase_input.isdigit():
|
||||
print(colored("Error: Minimum uppercase must be a number.", "red"))
|
||||
return
|
||||
min_uppercase = int(min_uppercase_input) if min_uppercase_input else None
|
||||
|
||||
min_lowercase_input = input(
|
||||
"Minimum lowercase letters (blank for default): "
|
||||
).strip()
|
||||
if min_lowercase_input and not min_lowercase_input.isdigit():
|
||||
print(colored("Error: Minimum lowercase must be a number.", "red"))
|
||||
return
|
||||
min_lowercase = int(min_lowercase_input) if min_lowercase_input else None
|
||||
|
||||
min_digits_input = input("Minimum digits (blank for default): ").strip()
|
||||
if min_digits_input and not min_digits_input.isdigit():
|
||||
print(colored("Error: Minimum digits must be a number.", "red"))
|
||||
return
|
||||
min_digits = int(min_digits_input) if min_digits_input else None
|
||||
|
||||
min_special_input = input(
|
||||
"Minimum special characters (blank for default): "
|
||||
).strip()
|
||||
if min_special_input and not min_special_input.isdigit():
|
||||
print(colored("Error: Minimum special must be a number.", "red"))
|
||||
return
|
||||
min_special = int(min_special_input) if min_special_input else None
|
||||
|
||||
index = pm.entry_manager.add_entry(
|
||||
website_name,
|
||||
length,
|
||||
username,
|
||||
url,
|
||||
archived=False,
|
||||
notes=notes,
|
||||
custom_fields=custom_fields,
|
||||
tags=tags,
|
||||
include_special_chars=include_special_chars,
|
||||
allowed_special_chars=allowed_special_chars,
|
||||
special_mode=special_mode,
|
||||
exclude_ambiguous=exclude_ambiguous,
|
||||
min_uppercase=min_uppercase,
|
||||
min_lowercase=min_lowercase,
|
||||
min_digits=min_digits,
|
||||
min_special=min_special,
|
||||
)
|
||||
|
||||
finalize_entry(index, website_name, length)
|
||||
|
||||
except Exception as e: # pragma: no cover - defensive
|
||||
logging.error(f"Error during password generation: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to generate password: {e}", "red"))
|
||||
pause()
|
@@ -15,3 +15,7 @@ class EntryType(str, Enum):
|
||||
NOSTR = "nostr"
|
||||
KEY_VALUE = "key_value"
|
||||
MANAGED_ACCOUNT = "managed_account"
|
||||
|
||||
|
||||
# List of all entry type values for convenience
|
||||
ALL_ENTRY_TYPES = [e.value for e in EntryType]
|
||||
|
File diff suppressed because it is too large
Load Diff
185
src/seedpass/core/menu_handler.py
Normal file
185
src/seedpass/core/menu_handler.py
Normal file
@@ -0,0 +1,185 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from termcolor import colored
|
||||
|
||||
from .entry_types import EntryType, ALL_ENTRY_TYPES
|
||||
import seedpass.core.manager as manager_module
|
||||
from utils.color_scheme import color_text
|
||||
from utils.terminal_utils import clear_header_with_notification
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover - typing only
|
||||
from .manager import PasswordManager
|
||||
|
||||
|
||||
class MenuHandler:
|
||||
"""Handle interactive menu operations for :class:`PasswordManager`."""
|
||||
|
||||
def __init__(self, manager: PasswordManager) -> None:
|
||||
self.manager = manager
|
||||
|
||||
def handle_list_entries(self) -> None:
|
||||
"""List entries and optionally show details."""
|
||||
pm = self.manager
|
||||
try:
|
||||
while True:
|
||||
fp, parent_fp, child_fp = pm.header_fingerprint_args
|
||||
clear_header_with_notification(
|
||||
pm,
|
||||
fp,
|
||||
"Main Menu > List Entries",
|
||||
parent_fingerprint=parent_fp,
|
||||
child_fingerprint=child_fp,
|
||||
)
|
||||
print(color_text("\nList Entries:", "menu"))
|
||||
print(color_text("1. All", "menu"))
|
||||
option_map: dict[str, str] = {}
|
||||
for i, etype in enumerate(ALL_ENTRY_TYPES, start=2):
|
||||
label = etype.replace("_", " ").title()
|
||||
print(color_text(f"{i}. {label}", "menu"))
|
||||
option_map[str(i)] = etype
|
||||
choice = input("Select entry type or press Enter to go back: ").strip()
|
||||
if choice == "1":
|
||||
filter_kinds = None
|
||||
elif choice in option_map:
|
||||
filter_kinds = [option_map[choice]]
|
||||
elif not choice:
|
||||
return
|
||||
else:
|
||||
print(colored("Invalid choice.", "red"))
|
||||
continue
|
||||
|
||||
while True:
|
||||
summaries = pm.entry_manager.get_entry_summaries(
|
||||
filter_kinds, include_archived=False
|
||||
)
|
||||
if not summaries:
|
||||
break
|
||||
fp, parent_fp, child_fp = pm.header_fingerprint_args
|
||||
clear_header_with_notification(
|
||||
pm,
|
||||
fp,
|
||||
"Main Menu > List Entries",
|
||||
parent_fingerprint=parent_fp,
|
||||
child_fingerprint=child_fp,
|
||||
)
|
||||
print(colored("\n[+] Entries:\n", "green"))
|
||||
for idx, etype, label in summaries:
|
||||
if filter_kinds is None:
|
||||
display_type = etype.capitalize()
|
||||
print(colored(f"{idx}. {display_type} - {label}", "cyan"))
|
||||
else:
|
||||
print(colored(f"{idx}. {label}", "cyan"))
|
||||
idx_input = input(
|
||||
"Enter index to view details or press Enter to go back: "
|
||||
).strip()
|
||||
if not idx_input:
|
||||
break
|
||||
if not idx_input.isdigit():
|
||||
print(colored("Invalid index.", "red"))
|
||||
continue
|
||||
pm.show_entry_details_by_index(int(idx_input))
|
||||
except Exception as e: # pragma: no cover - defensive
|
||||
logging.error(f"Failed to list entries: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to list entries: {e}", "red"))
|
||||
|
||||
def handle_display_totp_codes(self) -> None:
|
||||
"""Display all stored TOTP codes with a countdown progress bar."""
|
||||
pm = self.manager
|
||||
try:
|
||||
fp, parent_fp, child_fp = pm.header_fingerprint_args
|
||||
clear_header_with_notification(
|
||||
pm,
|
||||
fp,
|
||||
"Main Menu > 2FA Codes",
|
||||
parent_fingerprint=parent_fp,
|
||||
child_fingerprint=child_fp,
|
||||
)
|
||||
data = pm.entry_manager.vault.load_index()
|
||||
entries = data.get("entries", {})
|
||||
totp_list: list[tuple[str, int, int, bool]] = []
|
||||
for idx_str, entry in entries.items():
|
||||
if pm._entry_type_str(entry) == EntryType.TOTP.value and not entry.get(
|
||||
"archived", entry.get("blacklisted", False)
|
||||
):
|
||||
label = entry.get("label", "")
|
||||
period = int(entry.get("period", 30))
|
||||
imported = "secret" in entry
|
||||
totp_list.append((label, int(idx_str), period, imported))
|
||||
|
||||
if not totp_list:
|
||||
pm.notify("No 2FA entries found.", level="WARNING")
|
||||
return
|
||||
|
||||
totp_list.sort(key=lambda t: t[0].lower())
|
||||
print(colored("Press Enter to return to the menu.", "cyan"))
|
||||
while True:
|
||||
fp, parent_fp, child_fp = pm.header_fingerprint_args
|
||||
clear_header_with_notification(
|
||||
pm,
|
||||
fp,
|
||||
"Main Menu > 2FA Codes",
|
||||
parent_fingerprint=parent_fp,
|
||||
child_fingerprint=child_fp,
|
||||
)
|
||||
print(colored("Press Enter to return to the menu.", "cyan"))
|
||||
generated = [t for t in totp_list if not t[3]]
|
||||
imported_list = [t for t in totp_list if t[3]]
|
||||
if generated:
|
||||
print(colored("\nGenerated 2FA Codes:", "green"))
|
||||
for label, idx, period, _ in generated:
|
||||
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(
|
||||
pm, "parent_seed", None
|
||||
)
|
||||
code = pm.entry_manager.get_totp_code(idx, key)
|
||||
remaining = pm.entry_manager.get_totp_time_remaining(idx)
|
||||
filled = int(20 * (period - remaining) / period)
|
||||
bar = "[" + "#" * filled + "-" * (20 - filled) + "]"
|
||||
if pm.secret_mode_enabled:
|
||||
if manager_module.copy_to_clipboard(
|
||||
code, pm.clipboard_clear_delay
|
||||
):
|
||||
print(
|
||||
f"[{idx}] {label}: [HIDDEN] {bar} {remaining:2d}s - copied to clipboard"
|
||||
)
|
||||
else:
|
||||
print(
|
||||
f"[{idx}] {label}: {color_text(code, 'deterministic')} {bar} {remaining:2d}s"
|
||||
)
|
||||
if imported_list:
|
||||
print(colored("\nImported 2FA Codes:", "green"))
|
||||
for label, idx, period, _ in imported_list:
|
||||
key = getattr(pm, "KEY_TOTP_DET", None) or getattr(
|
||||
pm, "parent_seed", None
|
||||
)
|
||||
code = pm.entry_manager.get_totp_code(idx, key)
|
||||
remaining = pm.entry_manager.get_totp_time_remaining(idx)
|
||||
filled = int(20 * (period - remaining) / period)
|
||||
bar = "[" + "#" * filled + "-" * (20 - filled) + "]"
|
||||
if pm.secret_mode_enabled:
|
||||
if manager_module.copy_to_clipboard(
|
||||
code, pm.clipboard_clear_delay
|
||||
):
|
||||
print(
|
||||
f"[{idx}] {label}: [HIDDEN] {bar} {remaining:2d}s - copied to clipboard"
|
||||
)
|
||||
else:
|
||||
print(
|
||||
f"[{idx}] {label}: {color_text(code, 'imported')} {bar} {remaining:2d}s"
|
||||
)
|
||||
sys.stdout.flush()
|
||||
try:
|
||||
user_input = manager_module.timed_input("", 1)
|
||||
if user_input.strip() == "" or user_input.strip().lower() == "b":
|
||||
break
|
||||
except TimeoutError:
|
||||
pass
|
||||
except KeyboardInterrupt:
|
||||
print()
|
||||
break
|
||||
except Exception as e: # pragma: no cover - defensive
|
||||
logging.error(f"Error displaying TOTP codes: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to display TOTP codes: {e}", "red"))
|
@@ -11,14 +11,18 @@ Ensure that all dependencies are installed and properly configured in your envir
|
||||
|
||||
Never ever ever use Random Salt. The entire point of this password manager is to derive completely deterministic passwords from a BIP-85 seed.
|
||||
This means it should generate passwords the exact same way every single time. Salts would break this functionality and is not appropriate for this software's use case.
|
||||
To keep behaviour stable across Python versions, the shuffling logic uses an
|
||||
HMAC-SHA256-based Fisher–Yates shuffle instead of ``random.Random``. The HMAC
|
||||
is keyed with the derived password bytes, providing deterministic yet
|
||||
cryptographically strong pseudo-randomness without relying on Python's
|
||||
non-stable random implementation.
|
||||
"""
|
||||
|
||||
import os
|
||||
import logging
|
||||
import hashlib
|
||||
import string
|
||||
import random
|
||||
import traceback
|
||||
import hmac
|
||||
import base64
|
||||
from typing import Optional
|
||||
from dataclasses import dataclass
|
||||
@@ -109,10 +113,12 @@ class PasswordGenerator:
|
||||
self.bip85 = bip85
|
||||
self.policy = policy or PasswordPolicy()
|
||||
|
||||
# Derive seed bytes from parent_seed using BIP39 (handled by EncryptionManager)
|
||||
self.seed_bytes = self.encryption_manager.derive_seed_from_mnemonic(
|
||||
self.parent_seed
|
||||
)
|
||||
if isinstance(parent_seed, (bytes, bytearray)):
|
||||
self.seed_bytes = bytes(parent_seed)
|
||||
else:
|
||||
self.seed_bytes = self.encryption_manager.derive_seed_from_mnemonic(
|
||||
self.parent_seed
|
||||
)
|
||||
|
||||
logger.debug("PasswordGenerator initialized successfully.")
|
||||
except Exception as e:
|
||||
@@ -122,8 +128,8 @@ class PasswordGenerator:
|
||||
|
||||
def _derive_password_entropy(self, index: int) -> bytes:
|
||||
"""Derive deterministic entropy for password generation."""
|
||||
entropy = self.bip85.derive_entropy(index=index, bytes_len=64, app_no=32)
|
||||
logger.debug(f"Derived entropy: {entropy.hex()}")
|
||||
entropy = self.bip85.derive_entropy(index=index, entropy_bytes=64, app_no=32)
|
||||
logger.debug("Entropy derived for password generation.")
|
||||
|
||||
hkdf = HKDF(
|
||||
algorithm=hashes.SHA256(),
|
||||
@@ -133,26 +139,43 @@ class PasswordGenerator:
|
||||
backend=default_backend(),
|
||||
)
|
||||
hkdf_derived = hkdf.derive(entropy)
|
||||
logger.debug(f"Derived key using HKDF: {hkdf_derived.hex()}")
|
||||
logger.debug("Derived key using HKDF.")
|
||||
|
||||
dk = hashlib.pbkdf2_hmac("sha256", entropy, b"", 100000)
|
||||
logger.debug(f"Derived key using PBKDF2: {dk.hex()}")
|
||||
logger.debug("Derived key using PBKDF2.")
|
||||
return dk
|
||||
|
||||
def _map_entropy_to_chars(self, dk: bytes, alphabet: str) -> str:
|
||||
"""Map derived bytes to characters from the provided alphabet."""
|
||||
password = "".join(alphabet[byte % len(alphabet)] for byte in dk)
|
||||
logger.debug(f"Password after mapping to all allowed characters: {password}")
|
||||
logger.debug("Mapped entropy to allowed characters.")
|
||||
return password
|
||||
|
||||
def _fisher_yates_hmac(self, items: list[str], key: bytes) -> list[str]:
|
||||
"""Shuffle ``items`` in a deterministic yet cryptographically sound manner.
|
||||
|
||||
A Fisher–Yates shuffle is driven by an HMAC-SHA256 based
|
||||
pseudo-random number generator seeded with ``key``. Unlike
|
||||
:class:`random.Random`, this approach is stable across Python
|
||||
versions while still deriving all of its entropy from ``key``.
|
||||
"""
|
||||
|
||||
counter = 0
|
||||
for i in range(len(items) - 1, 0, -1):
|
||||
msg = counter.to_bytes(4, "big")
|
||||
digest = hmac.new(key, msg, hashlib.sha256).digest()
|
||||
j = int.from_bytes(digest, "big") % (i + 1)
|
||||
items[i], items[j] = items[j], items[i]
|
||||
counter += 1
|
||||
return items
|
||||
|
||||
def _shuffle_deterministically(self, password: str, dk: bytes) -> str:
|
||||
"""Deterministically shuffle characters using derived bytes."""
|
||||
shuffle_seed = int.from_bytes(dk, "big")
|
||||
rng = random.Random(shuffle_seed)
|
||||
"""Deterministically shuffle characters using an HMAC-based PRNG."""
|
||||
|
||||
password_chars = list(password)
|
||||
rng.shuffle(password_chars)
|
||||
shuffled = "".join(password_chars)
|
||||
logger.debug("Shuffled password deterministically.")
|
||||
shuffled_chars = self._fisher_yates_hmac(password_chars, dk)
|
||||
shuffled = "".join(shuffled_chars)
|
||||
logger.debug("Shuffled password deterministically using HMAC-Fisher-Yates.")
|
||||
return shuffled
|
||||
|
||||
def generate_password(
|
||||
@@ -226,7 +249,7 @@ class PasswordGenerator:
|
||||
extra = self._map_entropy_to_chars(dk, all_allowed)
|
||||
password += extra
|
||||
password = self._shuffle_deterministically(password, dk)
|
||||
logger.debug(f"Extended password: {password}")
|
||||
logger.debug("Extended password to meet length requirement.")
|
||||
|
||||
# Trim the password to the desired length and enforce complexity on
|
||||
# the final result. Complexity enforcement is repeated here because
|
||||
@@ -239,7 +262,7 @@ class PasswordGenerator:
|
||||
)
|
||||
password = self._shuffle_deterministically(password, dk)
|
||||
logger.debug(
|
||||
f"Final password (trimmed to {length} chars with complexity enforced): {password}"
|
||||
f"Generated final password of length {length} with complexity enforced."
|
||||
)
|
||||
|
||||
return password
|
||||
@@ -311,34 +334,28 @@ class PasswordGenerator:
|
||||
index = get_dk_value() % len(password_chars)
|
||||
char = uppercase[get_dk_value() % len(uppercase)]
|
||||
password_chars[index] = char
|
||||
logger.debug(
|
||||
f"Added uppercase letter '{char}' at position {index}."
|
||||
)
|
||||
logger.debug(f"Added uppercase letter at position {index}.")
|
||||
|
||||
if current_lower < min_lower:
|
||||
for _ in range(min_lower - current_lower):
|
||||
index = get_dk_value() % len(password_chars)
|
||||
char = lowercase[get_dk_value() % len(lowercase)]
|
||||
password_chars[index] = char
|
||||
logger.debug(
|
||||
f"Added lowercase letter '{char}' at position {index}."
|
||||
)
|
||||
logger.debug(f"Added lowercase letter at position {index}.")
|
||||
|
||||
if current_digits < min_digits:
|
||||
for _ in range(min_digits - current_digits):
|
||||
index = get_dk_value() % len(password_chars)
|
||||
char = digits[get_dk_value() % len(digits)]
|
||||
password_chars[index] = char
|
||||
logger.debug(f"Added digit '{char}' at position {index}.")
|
||||
logger.debug(f"Added digit at position {index}.")
|
||||
|
||||
if special and current_special < min_special:
|
||||
for _ in range(min_special - current_special):
|
||||
index = get_dk_value() % len(password_chars)
|
||||
char = special[get_dk_value() % len(special)]
|
||||
password_chars[index] = char
|
||||
logger.debug(
|
||||
f"Added special character '{char}' at position {index}."
|
||||
)
|
||||
logger.debug(f"Added special character at position {index}.")
|
||||
|
||||
# Additional deterministic inclusion of symbols to increase score
|
||||
if special:
|
||||
@@ -352,9 +369,7 @@ class PasswordGenerator:
|
||||
index = get_dk_value() % len(password_chars)
|
||||
char = special[get_dk_value() % len(special)]
|
||||
password_chars[index] = char
|
||||
logger.debug(
|
||||
f"Added additional symbol '{char}' at position {index}."
|
||||
)
|
||||
logger.debug(f"Added additional symbol at position {index}.")
|
||||
|
||||
# Ensure balanced distribution by assigning different character types to specific segments
|
||||
# Example: Divide password into segments and assign different types
|
||||
@@ -372,19 +387,15 @@ class PasswordGenerator:
|
||||
if i == 0 and password_chars[j] not in uppercase:
|
||||
char = uppercase[get_dk_value() % len(uppercase)]
|
||||
password_chars[j] = char
|
||||
logger.debug(
|
||||
f"Assigned uppercase letter '{char}' to position {j}."
|
||||
)
|
||||
logger.debug(f"Assigned uppercase letter to position {j}.")
|
||||
elif i == 1 and password_chars[j] not in lowercase:
|
||||
char = lowercase[get_dk_value() % len(lowercase)]
|
||||
password_chars[j] = char
|
||||
logger.debug(
|
||||
f"Assigned lowercase letter '{char}' to position {j}."
|
||||
)
|
||||
logger.debug(f"Assigned lowercase letter to position {j}.")
|
||||
elif i == 2 and password_chars[j] not in digits:
|
||||
char = digits[get_dk_value() % len(digits)]
|
||||
password_chars[j] = char
|
||||
logger.debug(f"Assigned digit '{char}' to position {j}.")
|
||||
logger.debug(f"Assigned digit to position {j}.")
|
||||
elif (
|
||||
special
|
||||
and i == len(char_types) - 1
|
||||
@@ -392,17 +403,18 @@ class PasswordGenerator:
|
||||
):
|
||||
char = special[get_dk_value() % len(special)]
|
||||
password_chars[j] = char
|
||||
logger.debug(
|
||||
f"Assigned special character '{char}' to position {j}."
|
||||
)
|
||||
logger.debug(f"Assigned special character to position {j}.")
|
||||
|
||||
# Shuffle again to distribute the characters more evenly
|
||||
shuffle_seed = (
|
||||
int.from_bytes(dk, "big") + dk_index
|
||||
) # Modify seed to vary shuffle
|
||||
rng = random.Random(shuffle_seed)
|
||||
rng.shuffle(password_chars)
|
||||
logger.debug(f"Shuffled password characters for balanced distribution.")
|
||||
# Shuffle again to distribute the characters more evenly. The key is
|
||||
# tweaked with the current ``dk_index`` so that each call produces a
|
||||
# unique but deterministic ordering.
|
||||
shuffle_key = hmac.new(
|
||||
dk, dk_index.to_bytes(4, "big"), hashlib.sha256
|
||||
).digest()
|
||||
password_chars = self._fisher_yates_hmac(password_chars, shuffle_key)
|
||||
logger.debug(
|
||||
"Shuffled password characters for balanced distribution using HMAC-Fisher-Yates."
|
||||
)
|
||||
|
||||
# Final counts after modifications
|
||||
final_upper = sum(1 for c in password_chars if c in uppercase)
|
||||
@@ -423,7 +435,7 @@ class PasswordGenerator:
|
||||
|
||||
def derive_ssh_key(bip85: BIP85, idx: int) -> bytes:
|
||||
"""Derive 32 bytes of entropy suitable for an SSH key."""
|
||||
return bip85.derive_entropy(index=idx, bytes_len=32, app_no=32)
|
||||
return bip85.derive_entropy(index=idx, entropy_bytes=32, app_no=32)
|
||||
|
||||
|
||||
def derive_ssh_key_pair(parent_seed: str, index: int) -> tuple[str, str]:
|
||||
@@ -457,7 +469,13 @@ def derive_seed_phrase(bip85: BIP85, idx: int, words: int = 24) -> str:
|
||||
def derive_pgp_key(
|
||||
bip85: BIP85, idx: int, key_type: str = "ed25519", user_id: str = ""
|
||||
) -> tuple[str, str]:
|
||||
"""Derive a deterministic PGP private key and return it with its fingerprint."""
|
||||
"""Derive a deterministic PGP private key and return it with its fingerprint.
|
||||
|
||||
For RSA keys the randomness required during key generation is provided by
|
||||
an HMAC-SHA256 based deterministic generator seeded from the BIP-85
|
||||
entropy. This avoids use of Python's ``random`` module while ensuring the
|
||||
output remains stable across Python versions.
|
||||
"""
|
||||
|
||||
from pgpy import PGPKey, PGPUID
|
||||
from pgpy.packet.packets import PrivKeyV4
|
||||
@@ -483,20 +501,24 @@ def derive_pgp_key(
|
||||
import hashlib
|
||||
import datetime
|
||||
|
||||
entropy = bip85.derive_entropy(index=idx, bytes_len=32, app_no=32)
|
||||
entropy = bip85.derive_entropy(index=idx, entropy_bytes=32, app_no=32)
|
||||
created = datetime.datetime(2000, 1, 1, tzinfo=datetime.timezone.utc)
|
||||
|
||||
if key_type.lower() == "rsa":
|
||||
|
||||
class DRNG:
|
||||
"""HMAC-SHA256 based deterministic random generator."""
|
||||
|
||||
def __init__(self, seed: bytes) -> None:
|
||||
self.seed = seed
|
||||
self.key = seed
|
||||
self.counter = 0
|
||||
|
||||
def __call__(self, n: int) -> bytes: # pragma: no cover - deterministic
|
||||
out = b""
|
||||
while len(out) < n:
|
||||
self.seed = hashlib.sha256(self.seed).digest()
|
||||
out += self.seed
|
||||
msg = self.counter.to_bytes(4, "big")
|
||||
out += hmac.new(self.key, msg, hashlib.sha256).digest()
|
||||
self.counter += 1
|
||||
return out[:n]
|
||||
|
||||
rsa_key = RSA.generate(2048, randfunc=DRNG(entropy))
|
||||
|
@@ -112,7 +112,7 @@ def import_backup(
|
||||
|
||||
raw = Path(path).read_bytes()
|
||||
if path.suffix.endswith(".enc"):
|
||||
raw = vault.encryption_manager.decrypt_data(raw)
|
||||
raw = vault.encryption_manager.decrypt_data(raw, context=str(path))
|
||||
|
||||
wrapper = json.loads(raw.decode("utf-8"))
|
||||
if wrapper.get("format_version") != FORMAT_VERSION:
|
||||
@@ -129,7 +129,8 @@ def import_backup(
|
||||
)
|
||||
key = _derive_export_key(seed)
|
||||
enc_mgr = EncryptionManager(key, vault.fingerprint_dir)
|
||||
index_bytes = enc_mgr.decrypt_data(payload)
|
||||
enc_mgr._legacy_migrate_flag = False
|
||||
index_bytes = enc_mgr.decrypt_data(payload, context="backup payload")
|
||||
index = json.loads(index_bytes.decode("utf-8"))
|
||||
|
||||
checksum = json_checksum(index)
|
||||
|
108
src/seedpass/core/profile_service.py
Normal file
108
src/seedpass/core/profile_service.py
Normal file
@@ -0,0 +1,108 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
|
||||
from termcolor import colored
|
||||
|
||||
import seedpass.core.manager as manager_module
|
||||
from nostr.snapshot import MANIFEST_ID_PREFIX
|
||||
|
||||
from utils.password_prompt import prompt_existing_password
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover - typing only
|
||||
from .manager import PasswordManager
|
||||
from nostr.client import NostrClient
|
||||
|
||||
|
||||
class ProfileService:
|
||||
"""Profile-related operations for :class:`PasswordManager`."""
|
||||
|
||||
def __init__(self, manager: PasswordManager) -> None:
|
||||
self.manager = manager
|
||||
|
||||
def handle_switch_fingerprint(self, *, password: Optional[str] = None) -> bool:
|
||||
"""Handle switching to a different seed profile."""
|
||||
pm = self.manager
|
||||
try:
|
||||
print(colored("\nAvailable Seed Profiles:", "cyan"))
|
||||
fingerprints = pm.fingerprint_manager.list_fingerprints()
|
||||
for idx, fp in enumerate(fingerprints, start=1):
|
||||
display = (
|
||||
pm.fingerprint_manager.display_name(fp)
|
||||
if hasattr(pm.fingerprint_manager, "display_name")
|
||||
else fp
|
||||
)
|
||||
print(colored(f"{idx}. {display}", "cyan"))
|
||||
|
||||
choice = input("Select a seed profile by number to switch: ").strip()
|
||||
if not choice.isdigit() or not (1 <= int(choice) <= len(fingerprints)):
|
||||
print(colored("Invalid selection. Returning to main menu.", "red"))
|
||||
return False
|
||||
|
||||
selected_fingerprint = fingerprints[int(choice) - 1]
|
||||
pm.fingerprint_manager.current_fingerprint = selected_fingerprint
|
||||
pm.current_fingerprint = selected_fingerprint
|
||||
if not getattr(pm, "manifest_id", None):
|
||||
pm.manifest_id = f"{MANIFEST_ID_PREFIX}{selected_fingerprint}"
|
||||
|
||||
pm.fingerprint_dir = pm.fingerprint_manager.get_current_fingerprint_dir()
|
||||
if not pm.fingerprint_dir:
|
||||
print(
|
||||
colored(
|
||||
f"Error: Seed profile directory for {selected_fingerprint} not found.",
|
||||
"red",
|
||||
)
|
||||
)
|
||||
return False
|
||||
|
||||
if password is None:
|
||||
password = prompt_existing_password(
|
||||
"Enter the master password for the selected seed profile: "
|
||||
)
|
||||
|
||||
if not pm.setup_encryption_manager(
|
||||
pm.fingerprint_dir, password, exit_on_fail=False
|
||||
):
|
||||
return False
|
||||
|
||||
pm.initialize_bip85()
|
||||
pm.initialize_managers()
|
||||
pm.start_background_sync()
|
||||
print(colored(f"Switched to seed profile {selected_fingerprint}.", "green"))
|
||||
|
||||
try:
|
||||
pm.nostr_client = manager_module.NostrClient(
|
||||
encryption_manager=pm.encryption_manager,
|
||||
fingerprint=pm.current_fingerprint,
|
||||
config_manager=getattr(pm, "config_manager", None),
|
||||
parent_seed=getattr(pm, "parent_seed", None),
|
||||
)
|
||||
if getattr(pm, "manifest_id", None) and hasattr(
|
||||
pm.nostr_client, "_state_lock"
|
||||
):
|
||||
from nostr.backup_models import Manifest
|
||||
|
||||
with pm.nostr_client._state_lock:
|
||||
pm.nostr_client.current_manifest_id = pm.manifest_id
|
||||
pm.nostr_client.current_manifest = Manifest(
|
||||
ver=1,
|
||||
algo="gzip",
|
||||
chunks=[],
|
||||
delta_since=pm.delta_since or None,
|
||||
)
|
||||
logging.info(
|
||||
f"NostrClient re-initialized with seed profile {pm.current_fingerprint}."
|
||||
)
|
||||
except Exception as e:
|
||||
logging.error(f"Failed to re-initialize NostrClient: {e}")
|
||||
print(
|
||||
colored(f"Error: Failed to re-initialize NostrClient: {e}", "red")
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
except Exception as e: # pragma: no cover - defensive
|
||||
logging.error(f"Error during seed profile switching: {e}", exc_info=True)
|
||||
print(colored(f"Error: Failed to switch seed profiles: {e}", "red"))
|
||||
return False
|
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import time
|
||||
from typing import Union
|
||||
from urllib.parse import quote
|
||||
from urllib.parse import urlparse, parse_qs, unquote
|
||||
|
||||
@@ -18,13 +19,15 @@ class TotpManager:
|
||||
"""Helper methods for TOTP secrets and codes."""
|
||||
|
||||
@staticmethod
|
||||
def derive_secret(seed: str, index: int) -> str:
|
||||
"""Derive a TOTP secret from a BIP39 seed and index."""
|
||||
def derive_secret(seed: Union[str, bytes], index: int) -> str:
|
||||
"""Derive a TOTP secret from a seed or raw key and index."""
|
||||
return key_derivation.derive_totp_secret(seed, index)
|
||||
|
||||
@classmethod
|
||||
def current_code(cls, seed: str, index: int, timestamp: int | None = None) -> str:
|
||||
"""Return the TOTP code for the given seed and index."""
|
||||
def current_code(
|
||||
cls, seed: Union[str, bytes], index: int, timestamp: int | None = None
|
||||
) -> str:
|
||||
"""Return the TOTP code for the given seed/key and index."""
|
||||
secret = cls.derive_secret(seed, index)
|
||||
totp = pyotp.TOTP(secret)
|
||||
if timestamp is None:
|
||||
|
@@ -3,8 +3,19 @@
|
||||
from pathlib import Path
|
||||
from typing import Optional, Union
|
||||
from os import PathLike
|
||||
import shutil
|
||||
|
||||
from .encryption import EncryptionManager
|
||||
from termcolor import colored
|
||||
from cryptography.fernet import InvalidToken
|
||||
|
||||
from .encryption import (
|
||||
EncryptionManager,
|
||||
LegacyFormatRequiresMigrationError,
|
||||
USE_ORJSON,
|
||||
json_lib,
|
||||
)
|
||||
from utils.key_derivation import KdfConfig, CURRENT_KDF_VERSION
|
||||
from utils.password_prompt import prompt_existing_password
|
||||
|
||||
|
||||
class Vault:
|
||||
@@ -22,26 +33,164 @@ class Vault:
|
||||
self.fingerprint_dir = Path(fingerprint_dir)
|
||||
self.index_file = self.fingerprint_dir / self.INDEX_FILENAME
|
||||
self.config_file = self.fingerprint_dir / self.CONFIG_FILENAME
|
||||
self.migrated_from_legacy = False
|
||||
|
||||
def set_encryption_manager(self, manager: EncryptionManager) -> None:
|
||||
"""Replace the internal encryption manager."""
|
||||
self.encryption_manager = manager
|
||||
|
||||
def _hkdf_kdf(self) -> KdfConfig:
|
||||
return KdfConfig(
|
||||
name="hkdf", version=CURRENT_KDF_VERSION, params={}, salt_b64=""
|
||||
)
|
||||
|
||||
# ----- Password index helpers -----
|
||||
def load_index(self) -> dict:
|
||||
"""Return decrypted password index data as a dict, applying migrations."""
|
||||
def load_index(self, *, return_migration_flags: bool = False):
|
||||
"""Return decrypted password index data, applying migrations.
|
||||
|
||||
If a legacy ``seedpass_passwords_db.json.enc`` file is detected, the
|
||||
user is prompted to migrate it. A backup copy of the legacy file (and
|
||||
its checksum) is saved under ``legacy_backups`` within the fingerprint
|
||||
directory before renaming to the new filename.
|
||||
|
||||
When ``return_migration_flags`` is ``True`` the tuple
|
||||
``(data, migrated, last_migration_performed)`` is returned where
|
||||
``migrated`` indicates whether any migration occurred and
|
||||
``last_migration_performed`` reflects whether the underlying
|
||||
:class:`EncryptionManager` reported a conversion.
|
||||
"""
|
||||
|
||||
legacy_file = self.fingerprint_dir / "seedpass_passwords_db.json.enc"
|
||||
self.migrated_from_legacy = False
|
||||
legacy_detected = False
|
||||
backup_dir = None
|
||||
if legacy_file.exists() and not self.index_file.exists():
|
||||
print(colored("Legacy index detected.", "yellow"))
|
||||
resp = (
|
||||
input("Would you like to migrate this to the new index format? [y/N]: ")
|
||||
.strip()
|
||||
.lower()
|
||||
)
|
||||
if resp != "y":
|
||||
raise RuntimeError("Migration declined by user")
|
||||
|
||||
legacy_checksum = (
|
||||
self.fingerprint_dir / "seedpass_passwords_db_checksum.txt"
|
||||
)
|
||||
backup_dir = self.fingerprint_dir / "legacy_backups"
|
||||
backup_dir.mkdir(exist_ok=True)
|
||||
shutil.copy2(legacy_file, backup_dir / legacy_file.name)
|
||||
if legacy_checksum.exists():
|
||||
shutil.copy2(legacy_checksum, backup_dir / legacy_checksum.name)
|
||||
|
||||
legacy_file.rename(self.index_file)
|
||||
if legacy_checksum.exists():
|
||||
legacy_checksum.rename(
|
||||
self.fingerprint_dir / "seedpass_entries_db_checksum.txt"
|
||||
)
|
||||
|
||||
data = self.encryption_manager.load_json_data(self.index_file)
|
||||
# Remove any leftover legacy files to avoid triggering migration again
|
||||
for stray in self.fingerprint_dir.glob("seedpass_passwords_db*.enc"):
|
||||
try:
|
||||
stray.unlink()
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
stray_checksum = self.fingerprint_dir / "seedpass_passwords_db_checksum.txt"
|
||||
if stray_checksum.exists():
|
||||
stray_checksum.unlink()
|
||||
|
||||
legacy_detected = True
|
||||
print(
|
||||
colored(
|
||||
"Migration complete. Original index backed up to 'legacy_backups'",
|
||||
"green",
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
data, kdf = self.encryption_manager.load_json_data(
|
||||
self.index_file, return_kdf=True
|
||||
)
|
||||
migration_performed = getattr(
|
||||
self.encryption_manager, "last_migration_performed", False
|
||||
)
|
||||
if kdf.version < CURRENT_KDF_VERSION:
|
||||
new_kdf = KdfConfig(
|
||||
name=kdf.name,
|
||||
version=CURRENT_KDF_VERSION,
|
||||
params=kdf.params,
|
||||
salt_b64=kdf.salt_b64,
|
||||
)
|
||||
self.encryption_manager.save_json_data(
|
||||
data, self.index_file, kdf=new_kdf
|
||||
)
|
||||
self.encryption_manager.update_checksum(self.index_file)
|
||||
migration_performed = True
|
||||
except LegacyFormatRequiresMigrationError:
|
||||
print(
|
||||
colored(
|
||||
"Failed to decrypt index with current key. This may be a legacy index.",
|
||||
"red",
|
||||
)
|
||||
)
|
||||
resp = input(
|
||||
"\nChoose an option:\n"
|
||||
"1. Open legacy index without migrating\n"
|
||||
"2. Migrate to new format.\n"
|
||||
"Selection [1/2]: "
|
||||
).strip()
|
||||
if resp == "1":
|
||||
self.encryption_manager._legacy_migrate_flag = False
|
||||
self.encryption_manager.last_migration_performed = False
|
||||
elif resp == "2":
|
||||
self.encryption_manager._legacy_migrate_flag = True
|
||||
self.encryption_manager.last_migration_performed = True
|
||||
else:
|
||||
raise InvalidToken(
|
||||
"User declined legacy decryption or provided invalid choice."
|
||||
)
|
||||
password = prompt_existing_password(
|
||||
"Enter your master password for legacy decryption: "
|
||||
)
|
||||
with self.index_file.open("rb") as fh:
|
||||
encrypted_data = fh.read()
|
||||
decrypted = self.encryption_manager.decrypt_legacy(
|
||||
encrypted_data, password, context=str(self.index_file)
|
||||
)
|
||||
if USE_ORJSON:
|
||||
data = json_lib.loads(decrypted)
|
||||
else:
|
||||
data = json_lib.loads(decrypted.decode("utf-8"))
|
||||
if self.encryption_manager._legacy_migrate_flag:
|
||||
self.encryption_manager.save_json_data(
|
||||
data, self.index_file, kdf=self._hkdf_kdf()
|
||||
)
|
||||
self.encryption_manager.update_checksum(self.index_file)
|
||||
migration_performed = getattr(
|
||||
self.encryption_manager, "last_migration_performed", False
|
||||
)
|
||||
except Exception as exc: # noqa: BLE001 - surface clear error and restore
|
||||
if legacy_detected and backup_dir is not None:
|
||||
backup_file = backup_dir / legacy_file.name
|
||||
legacy_checksum_path = (
|
||||
self.fingerprint_dir / "seedpass_passwords_db_checksum.txt"
|
||||
)
|
||||
backup_checksum = backup_dir / legacy_checksum_path.name
|
||||
try:
|
||||
if self.index_file.exists():
|
||||
self.index_file.unlink()
|
||||
shutil.copy2(backup_file, legacy_file)
|
||||
checksum_new = (
|
||||
self.fingerprint_dir / "seedpass_entries_db_checksum.txt"
|
||||
)
|
||||
if checksum_new.exists():
|
||||
checksum_new.unlink()
|
||||
if backup_checksum.exists():
|
||||
shutil.copy2(backup_checksum, legacy_checksum_path)
|
||||
finally:
|
||||
self.migrated_from_legacy = False
|
||||
raise RuntimeError(f"Migration failed: {exc}") from exc
|
||||
|
||||
from .migrations import apply_migrations, LATEST_VERSION
|
||||
|
||||
version = data.get("schema_version", 0)
|
||||
@@ -49,24 +198,78 @@ class Vault:
|
||||
raise ValueError(
|
||||
f"File schema version {version} is newer than supported {LATEST_VERSION}"
|
||||
)
|
||||
data = apply_migrations(data)
|
||||
schema_migrated = version < LATEST_VERSION
|
||||
|
||||
try:
|
||||
data = apply_migrations(data)
|
||||
if schema_migrated:
|
||||
self.encryption_manager.save_json_data(
|
||||
data, self.index_file, kdf=self._hkdf_kdf()
|
||||
)
|
||||
self.encryption_manager.update_checksum(self.index_file)
|
||||
except Exception as exc: # noqa: BLE001 - surface clear error and restore
|
||||
if legacy_detected and backup_dir is not None:
|
||||
backup_file = backup_dir / legacy_file.name
|
||||
legacy_checksum_path = (
|
||||
self.fingerprint_dir / "seedpass_passwords_db_checksum.txt"
|
||||
)
|
||||
backup_checksum = backup_dir / legacy_checksum_path.name
|
||||
try:
|
||||
if self.index_file.exists():
|
||||
self.index_file.unlink()
|
||||
shutil.copy2(backup_file, legacy_file)
|
||||
checksum_new = (
|
||||
self.fingerprint_dir / "seedpass_entries_db_checksum.txt"
|
||||
)
|
||||
if checksum_new.exists():
|
||||
checksum_new.unlink()
|
||||
if backup_checksum.exists():
|
||||
shutil.copy2(backup_checksum, legacy_checksum_path)
|
||||
finally:
|
||||
self.migrated_from_legacy = False
|
||||
raise RuntimeError(f"Migration failed: {exc}") from exc
|
||||
|
||||
self.migrated_from_legacy = (
|
||||
legacy_detected or migration_performed or schema_migrated
|
||||
)
|
||||
if return_migration_flags:
|
||||
return data, self.migrated_from_legacy, migration_performed
|
||||
return data
|
||||
|
||||
def save_index(self, data: dict) -> None:
|
||||
"""Encrypt and write password index."""
|
||||
self.encryption_manager.save_json_data(data, self.index_file)
|
||||
self.encryption_manager.save_json_data(
|
||||
data, self.index_file, kdf=self._hkdf_kdf()
|
||||
)
|
||||
|
||||
def get_encrypted_index(self) -> Optional[bytes]:
|
||||
"""Return the encrypted index bytes if present."""
|
||||
return self.encryption_manager.get_encrypted_index()
|
||||
|
||||
def decrypt_and_save_index_from_nostr(
|
||||
self, encrypted_data: bytes, *, strict: bool = True, merge: bool = False
|
||||
) -> bool:
|
||||
"""Decrypt Nostr payload and update the local index."""
|
||||
return self.encryption_manager.decrypt_and_save_index_from_nostr(
|
||||
self,
|
||||
encrypted_data: bytes,
|
||||
*,
|
||||
strict: bool = True,
|
||||
merge: bool = False,
|
||||
return_migration_flag: bool = False,
|
||||
):
|
||||
"""Decrypt Nostr payload and update the local index.
|
||||
|
||||
Returns ``True``/``False`` for success by default. When
|
||||
``return_migration_flag`` is ``True`` a tuple ``(success, migrated)`` is
|
||||
returned, where ``migrated`` indicates whether any legacy migration
|
||||
occurred.
|
||||
"""
|
||||
result = self.encryption_manager.decrypt_and_save_index_from_nostr(
|
||||
encrypted_data, strict=strict, merge=merge
|
||||
)
|
||||
self.migrated_from_legacy = result and getattr(
|
||||
self.encryption_manager, "last_migration_performed", False
|
||||
)
|
||||
if return_migration_flag:
|
||||
return result, self.migrated_from_legacy
|
||||
return result
|
||||
|
||||
# ----- Config helpers -----
|
||||
def load_config(self) -> dict:
|
||||
@@ -75,4 +278,6 @@ class Vault:
|
||||
|
||||
def save_config(self, config: dict) -> None:
|
||||
"""Encrypt and persist configuration."""
|
||||
self.encryption_manager.save_json_data(config, self.config_file)
|
||||
self.encryption_manager.save_json_data(
|
||||
config, self.config_file, kdf=self._hkdf_kdf()
|
||||
)
|
||||
|
4
src/seedpass/errors.py
Normal file
4
src/seedpass/errors.py
Normal file
@@ -0,0 +1,4 @@
|
||||
class VaultLockedError(Exception):
|
||||
"""Raised when an operation requires an unlocked vault."""
|
||||
|
||||
pass
|
@@ -1,4 +1,4 @@
|
||||
from .app import main
|
||||
from . import main
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
@@ -393,7 +393,7 @@ class TotpViewerWindow(toga.Window):
|
||||
def refresh_codes(self) -> None:
|
||||
self.table.data = []
|
||||
for idx, label, *_rest in self.entries.list_entries(
|
||||
filter_kind=EntryType.TOTP.value
|
||||
filter_kinds=[EntryType.TOTP.value]
|
||||
):
|
||||
entry = self.entries.retrieve_entry(idx)
|
||||
code = self.entries.get_totp_code(idx)
|
||||
|
@@ -1,6 +1,25 @@
|
||||
import importlib.util
|
||||
import logging
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
sys.path.append(str(Path(__file__).resolve().parents[1]))
|
||||
|
||||
from helpers import create_vault, TEST_PASSWORD, TEST_SEED
|
||||
from seedpass.core.backup import BackupManager
|
||||
from seedpass.core.config_manager import ConfigManager
|
||||
from seedpass.core.entry_management import EntryManager
|
||||
from seedpass.core.manager import EncryptionMode, PasswordManager
|
||||
|
||||
|
||||
@pytest.fixture(
|
||||
params=["asyncio"] + (["trio"] if importlib.util.find_spec("trio") else [])
|
||||
)
|
||||
def anyio_backend(request):
|
||||
return request.param
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def mute_logging():
|
||||
@@ -49,3 +68,29 @@ def pytest_collection_modifyitems(
|
||||
for item in items:
|
||||
if "desktop" in item.keywords:
|
||||
item.add_marker(skip_desktop)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def vault(tmp_path):
|
||||
vault, _ = create_vault(tmp_path, TEST_SEED, TEST_PASSWORD)
|
||||
return vault
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def password_manager(vault, tmp_path):
|
||||
cfg_mgr = ConfigManager(vault, tmp_path)
|
||||
backup_mgr = BackupManager(tmp_path, cfg_mgr)
|
||||
entry_mgr = EntryManager(vault, backup_mgr)
|
||||
|
||||
pm = PasswordManager.__new__(PasswordManager)
|
||||
pm.encryption_mode = EncryptionMode.SEED_ONLY
|
||||
pm.encryption_manager = vault.encryption_manager
|
||||
pm.vault = vault
|
||||
pm.entry_manager = entry_mgr
|
||||
pm.backup_manager = backup_mgr
|
||||
pm.parent_seed = TEST_SEED
|
||||
pm.nostr_client = None
|
||||
pm.fingerprint_dir = tmp_path
|
||||
pm.is_dirty = False
|
||||
pm.secret_mode_enabled = False
|
||||
return pm
|
||||
|
@@ -11,6 +11,7 @@ from utils.key_derivation import (
|
||||
derive_index_key,
|
||||
derive_key_from_password,
|
||||
)
|
||||
from utils.fingerprint import generate_fingerprint
|
||||
|
||||
TEST_SEED = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
|
||||
TEST_PASSWORD = "pw"
|
||||
@@ -22,7 +23,8 @@ def create_vault(
|
||||
password: str = TEST_PASSWORD,
|
||||
) -> tuple[Vault, EncryptionManager]:
|
||||
"""Create a Vault initialized for tests."""
|
||||
seed_key = derive_key_from_password(password)
|
||||
fp = generate_fingerprint(seed)
|
||||
seed_key = derive_key_from_password(password, fp)
|
||||
seed_mgr = EncryptionManager(seed_key, dir_path)
|
||||
seed_mgr.encrypt_parent_seed(seed)
|
||||
|
||||
|
35
src/tests/test_add_new_fingerprint_words.py
Normal file
35
src/tests/test_add_new_fingerprint_words.py
Normal file
@@ -0,0 +1,35 @@
|
||||
import builtins
|
||||
from types import SimpleNamespace
|
||||
|
||||
import seedpass.core.manager as manager_module
|
||||
from helpers import TEST_SEED
|
||||
|
||||
|
||||
def test_add_new_fingerprint_word_entry_exits(monkeypatch):
|
||||
pm = manager_module.PasswordManager.__new__(manager_module.PasswordManager)
|
||||
pm.fingerprint_manager = SimpleNamespace(current_fingerprint=None)
|
||||
pm.initialize_managers = lambda: None
|
||||
|
||||
calls = {"count": 0}
|
||||
original_setup = manager_module.PasswordManager.setup_existing_seed
|
||||
|
||||
def setup_wrapper(self, *a, **k):
|
||||
calls["count"] += 1
|
||||
return original_setup(self, *a, **k)
|
||||
|
||||
monkeypatch.setattr(
|
||||
manager_module.PasswordManager, "setup_existing_seed", setup_wrapper
|
||||
)
|
||||
monkeypatch.setattr(manager_module, "prompt_seed_words", lambda *a, **k: TEST_SEED)
|
||||
monkeypatch.setattr(
|
||||
manager_module.PasswordManager,
|
||||
"_finalize_existing_seed",
|
||||
lambda self, seed, password=None: "fp",
|
||||
)
|
||||
monkeypatch.setattr(builtins, "input", lambda *_a, **_k: "2")
|
||||
|
||||
result = pm.add_new_fingerprint()
|
||||
|
||||
assert result == "fp"
|
||||
assert calls["count"] == 1
|
||||
assert pm.fingerprint_manager.current_fingerprint == "fp"
|
@@ -3,7 +3,8 @@ from pathlib import Path
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
import hashlib
|
||||
|
||||
sys.path.append(str(Path(__file__).resolve().parents[1]))
|
||||
|
||||
@@ -12,7 +13,7 @@ from seedpass.core.entry_types import EntryType
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client(monkeypatch):
|
||||
async def client(monkeypatch):
|
||||
dummy = SimpleNamespace(
|
||||
entry_manager=SimpleNamespace(
|
||||
search_entries=lambda q: [
|
||||
@@ -39,25 +40,36 @@ def client(monkeypatch):
|
||||
nostr_client=SimpleNamespace(
|
||||
key_manager=SimpleNamespace(get_npub=lambda: "np")
|
||||
),
|
||||
verify_password=lambda pw: True,
|
||||
)
|
||||
monkeypatch.setattr(api, "PasswordManager", lambda: dummy)
|
||||
monkeypatch.setenv("SEEDPASS_CORS_ORIGINS", "http://example.com")
|
||||
token = api.start_server()
|
||||
client = TestClient(api.app)
|
||||
return client, token
|
||||
transport = ASGITransport(app=api.app)
|
||||
async with AsyncClient(transport=transport, base_url="http://test") as ac:
|
||||
yield ac, token
|
||||
|
||||
|
||||
def test_cors_and_auth(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_token_hashed(client):
|
||||
_, token = client
|
||||
assert api.app.state.token_hash != token
|
||||
assert api.app.state.token_hash == hashlib.sha256(token.encode()).hexdigest()
|
||||
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_cors_and_auth(client):
|
||||
cl, token = client
|
||||
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
|
||||
res = cl.get("/api/v1/entry", params={"query": "s"}, headers=headers)
|
||||
res = await cl.get("/api/v1/entry", params={"query": "s"}, headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.headers.get("access-control-allow-origin") == "http://example.com"
|
||||
|
||||
|
||||
def test_invalid_token(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_invalid_token(client):
|
||||
cl, _token = client
|
||||
res = cl.get(
|
||||
res = await cl.get(
|
||||
"/api/v1/entry",
|
||||
params={"query": "s"},
|
||||
headers={"Authorization": "Bearer bad"},
|
||||
@@ -65,59 +77,65 @@ def test_invalid_token(client):
|
||||
assert res.status_code == 401
|
||||
|
||||
|
||||
def test_get_entry_by_id(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_get_entry_by_id(client):
|
||||
cl, token = client
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Origin": "http://example.com",
|
||||
"X-SeedPass-Password": "pw",
|
||||
}
|
||||
res = cl.get("/api/v1/entry/1", headers=headers)
|
||||
res = await cl.get("/api/v1/entry/1", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"label": "Site"}
|
||||
assert res.headers.get("access-control-allow-origin") == "http://example.com"
|
||||
|
||||
|
||||
def test_get_config_value(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_get_config_value(client):
|
||||
cl, token = client
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Origin": "http://example.com",
|
||||
}
|
||||
res = cl.get("/api/v1/config/k", headers=headers)
|
||||
res = await cl.get("/api/v1/config/k", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"key": "k", "value": "v"}
|
||||
assert res.headers.get("access-control-allow-origin") == "http://example.com"
|
||||
|
||||
|
||||
def test_list_fingerprint(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_list_fingerprint(client):
|
||||
cl, token = client
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Origin": "http://example.com",
|
||||
}
|
||||
res = cl.get("/api/v1/fingerprint", headers=headers)
|
||||
res = await cl.get("/api/v1/fingerprint", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == ["fp"]
|
||||
assert res.headers.get("access-control-allow-origin") == "http://example.com"
|
||||
|
||||
|
||||
def test_get_nostr_pubkey(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_get_nostr_pubkey(client):
|
||||
cl, token = client
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Origin": "http://example.com",
|
||||
}
|
||||
res = cl.get("/api/v1/nostr/pubkey", headers=headers)
|
||||
res = await cl.get("/api/v1/nostr/pubkey", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"npub": "np"}
|
||||
assert res.headers.get("access-control-allow-origin") == "http://example.com"
|
||||
|
||||
|
||||
def test_create_modify_archive_entry(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_create_modify_archive_entry(client):
|
||||
cl, token = client
|
||||
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
|
||||
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/entry",
|
||||
json={"label": "test", "length": 12},
|
||||
headers=headers,
|
||||
@@ -125,7 +143,7 @@ def test_create_modify_archive_entry(client):
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"id": 1}
|
||||
|
||||
res = cl.put(
|
||||
res = await cl.put(
|
||||
"/api/v1/entry/1",
|
||||
json={"username": "bob"},
|
||||
headers=headers,
|
||||
@@ -133,25 +151,26 @@ def test_create_modify_archive_entry(client):
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "ok"}
|
||||
|
||||
res = cl.post("/api/v1/entry/1/archive", headers=headers)
|
||||
res = await cl.post("/api/v1/entry/1/archive", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "archived"}
|
||||
|
||||
res = cl.post("/api/v1/entry/1/unarchive", headers=headers)
|
||||
res = await cl.post("/api/v1/entry/1/unarchive", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "active"}
|
||||
|
||||
|
||||
def test_update_config(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_update_config(client):
|
||||
cl, token = client
|
||||
called = {}
|
||||
|
||||
def set_timeout(val):
|
||||
called["val"] = val
|
||||
|
||||
api._pm.config_manager.set_inactivity_timeout = set_timeout
|
||||
api.app.state.pm.config_manager.set_inactivity_timeout = set_timeout
|
||||
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
|
||||
res = cl.put(
|
||||
res = await cl.put(
|
||||
"/api/v1/config/inactivity_timeout",
|
||||
json={"value": 42},
|
||||
headers=headers,
|
||||
@@ -162,13 +181,15 @@ def test_update_config(client):
|
||||
assert res.headers.get("access-control-allow-origin") == "http://example.com"
|
||||
|
||||
|
||||
def test_update_config_quick_unlock(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_update_config_quick_unlock(client):
|
||||
cl, token = client
|
||||
called = {}
|
||||
|
||||
api._pm.config_manager.set_quick_unlock = lambda v: called.setdefault("val", v)
|
||||
api.app.state.pm.config_manager.set_quick_unlock = lambda v: called.setdefault(
|
||||
"val", v
|
||||
)
|
||||
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
|
||||
res = cl.put(
|
||||
res = await cl.put(
|
||||
"/api/v1/config/quick_unlock",
|
||||
json={"value": True},
|
||||
headers=headers,
|
||||
@@ -178,13 +199,13 @@ def test_update_config_quick_unlock(client):
|
||||
assert called.get("val") is True
|
||||
|
||||
|
||||
def test_change_password_route(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_change_password_route(client):
|
||||
cl, token = client
|
||||
called = {}
|
||||
|
||||
api._pm.change_password = lambda o, n: called.setdefault("called", (o, n))
|
||||
api.app.state.pm.change_password = lambda o, n: called.setdefault("called", (o, n))
|
||||
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/change-password",
|
||||
headers=headers,
|
||||
json={"old": "old", "new": "new"},
|
||||
@@ -195,10 +216,11 @@ def test_change_password_route(client):
|
||||
assert res.headers.get("access-control-allow-origin") == "http://example.com"
|
||||
|
||||
|
||||
def test_update_config_unknown_key(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_update_config_unknown_key(client):
|
||||
cl, token = client
|
||||
headers = {"Authorization": f"Bearer {token}", "Origin": "http://example.com"}
|
||||
res = cl.put(
|
||||
res = await cl.put(
|
||||
"/api/v1/config/bogus",
|
||||
json={"value": 1},
|
||||
headers=headers,
|
||||
@@ -206,7 +228,8 @@ def test_update_config_unknown_key(client):
|
||||
assert res.status_code == 400
|
||||
|
||||
|
||||
def test_shutdown(client, monkeypatch):
|
||||
@pytest.mark.anyio
|
||||
async def test_shutdown(client, monkeypatch):
|
||||
cl, token = client
|
||||
|
||||
calls = {}
|
||||
@@ -222,7 +245,7 @@ def test_shutdown(client, monkeypatch):
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Origin": "http://example.com",
|
||||
}
|
||||
res = cl.post("/api/v1/shutdown", headers=headers)
|
||||
res = await cl.post("/api/v1/shutdown", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "shutting down"}
|
||||
assert calls["func"] is sys.exit
|
||||
@@ -230,6 +253,7 @@ def test_shutdown(client, monkeypatch):
|
||||
assert res.headers.get("access-control-allow-origin") == "http://example.com"
|
||||
|
||||
|
||||
@pytest.mark.anyio
|
||||
@pytest.mark.parametrize(
|
||||
"method,path",
|
||||
[
|
||||
@@ -247,11 +271,11 @@ def test_shutdown(client, monkeypatch):
|
||||
("post", "/api/v1/vault/lock"),
|
||||
],
|
||||
)
|
||||
def test_invalid_token_other_endpoints(client, method, path):
|
||||
async def test_invalid_token_other_endpoints(client, method, path):
|
||||
cl, _token = client
|
||||
req = getattr(cl, method)
|
||||
kwargs = {"headers": {"Authorization": "Bearer bad"}}
|
||||
if method in {"post", "put"}:
|
||||
kwargs["json"] = {}
|
||||
res = req(path, **kwargs)
|
||||
res = await req(path, **kwargs)
|
||||
assert res.status_code == 401
|
||||
|
@@ -1,5 +1,7 @@
|
||||
from types import SimpleNamespace
|
||||
from pathlib import Path
|
||||
import os
|
||||
import base64
|
||||
import pytest
|
||||
|
||||
from seedpass import api
|
||||
@@ -7,10 +9,12 @@ from test_api import client
|
||||
from helpers import dummy_nostr_client
|
||||
import string
|
||||
from seedpass.core.password_generation import PasswordGenerator, PasswordPolicy
|
||||
from seedpass.core.encryption import EncryptionManager
|
||||
from nostr.client import NostrClient, DEFAULT_RELAYS
|
||||
|
||||
|
||||
def test_create_and_modify_totp_entry(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_create_and_modify_totp_entry(client):
|
||||
cl, token = client
|
||||
calls = {}
|
||||
|
||||
@@ -21,13 +25,13 @@ def test_create_and_modify_totp_entry(client):
|
||||
def modify(idx, **kwargs):
|
||||
calls["modify"] = (idx, kwargs)
|
||||
|
||||
api._pm.entry_manager.add_totp = add_totp
|
||||
api._pm.entry_manager.modify_entry = modify
|
||||
api._pm.entry_manager.get_next_index = lambda: 5
|
||||
api._pm.parent_seed = "seed"
|
||||
api.app.state.pm.entry_manager.add_totp = add_totp
|
||||
api.app.state.pm.entry_manager.modify_entry = modify
|
||||
api.app.state.pm.entry_manager.get_next_index = lambda: 5
|
||||
api.app.state.pm.parent_seed = "seed"
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/entry",
|
||||
json={
|
||||
"type": "totp",
|
||||
@@ -51,7 +55,7 @@ def test_create_and_modify_totp_entry(client):
|
||||
"archived": False,
|
||||
}
|
||||
|
||||
res = cl.put(
|
||||
res = await cl.put(
|
||||
"/api/v1/entry/5",
|
||||
json={"period": 90, "digits": 6},
|
||||
headers=headers,
|
||||
@@ -62,7 +66,8 @@ def test_create_and_modify_totp_entry(client):
|
||||
assert calls["modify"][1]["digits"] == 6
|
||||
|
||||
|
||||
def test_create_and_modify_ssh_entry(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_create_and_modify_ssh_entry(client):
|
||||
cl, token = client
|
||||
calls = {}
|
||||
|
||||
@@ -73,12 +78,12 @@ def test_create_and_modify_ssh_entry(client):
|
||||
def modify(idx, **kwargs):
|
||||
calls["modify"] = (idx, kwargs)
|
||||
|
||||
api._pm.entry_manager.add_ssh_key = add_ssh
|
||||
api._pm.entry_manager.modify_entry = modify
|
||||
api._pm.parent_seed = "seed"
|
||||
api.app.state.pm.entry_manager.add_ssh_key = add_ssh
|
||||
api.app.state.pm.entry_manager.modify_entry = modify
|
||||
api.app.state.pm.parent_seed = "seed"
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/entry",
|
||||
json={"type": "ssh", "label": "S", "index": 2, "notes": "n"},
|
||||
headers=headers,
|
||||
@@ -87,7 +92,7 @@ def test_create_and_modify_ssh_entry(client):
|
||||
assert res.json() == {"id": 2}
|
||||
assert calls["create"] == {"index": 2, "notes": "n", "archived": False}
|
||||
|
||||
res = cl.put(
|
||||
res = await cl.put(
|
||||
"/api/v1/entry/2",
|
||||
json={"notes": "x"},
|
||||
headers=headers,
|
||||
@@ -97,29 +102,31 @@ def test_create_and_modify_ssh_entry(client):
|
||||
assert calls["modify"][1]["notes"] == "x"
|
||||
|
||||
|
||||
def test_update_entry_error(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_update_entry_error(client):
|
||||
cl, token = client
|
||||
|
||||
def modify(*a, **k):
|
||||
raise ValueError("nope")
|
||||
|
||||
api._pm.entry_manager.modify_entry = modify
|
||||
api.app.state.pm.entry_manager.modify_entry = modify
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.put("/api/v1/entry/1", json={"username": "x"}, headers=headers)
|
||||
res = await cl.put("/api/v1/entry/1", json={"username": "x"}, headers=headers)
|
||||
assert res.status_code == 400
|
||||
assert res.json() == {"detail": "nope"}
|
||||
|
||||
|
||||
def test_update_config_secret_mode(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_update_config_secret_mode(client):
|
||||
cl, token = client
|
||||
called = {}
|
||||
|
||||
def set_secret(val):
|
||||
called["val"] = val
|
||||
|
||||
api._pm.config_manager.set_secret_mode_enabled = set_secret
|
||||
api.app.state.pm.config_manager.set_secret_mode_enabled = set_secret
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.put(
|
||||
res = await cl.put(
|
||||
"/api/v1/config/secret_mode_enabled",
|
||||
json={"value": True},
|
||||
headers=headers,
|
||||
@@ -129,24 +136,28 @@ def test_update_config_secret_mode(client):
|
||||
assert called["val"] is True
|
||||
|
||||
|
||||
def test_totp_export_endpoint(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_totp_export_endpoint(client):
|
||||
cl, token = client
|
||||
api._pm.entry_manager.export_totp_entries = lambda seed: {"entries": ["x"]}
|
||||
api._pm.parent_seed = "seed"
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.get("/api/v1/totp/export", headers=headers)
|
||||
api.app.state.pm.entry_manager.export_totp_entries = lambda seed: {"entries": ["x"]}
|
||||
api.app.state.pm.parent_seed = "seed"
|
||||
headers = {"Authorization": f"Bearer {token}", "X-SeedPass-Password": "pw"}
|
||||
res = await cl.get("/api/v1/totp/export", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"entries": ["x"]}
|
||||
|
||||
|
||||
def test_totp_codes_endpoint(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_totp_codes_endpoint(client):
|
||||
cl, token = client
|
||||
api._pm.entry_manager.list_entries = lambda **kw: [(0, "Email", None, None, False)]
|
||||
api._pm.entry_manager.get_totp_code = lambda i, s: "123456"
|
||||
api._pm.entry_manager.get_totp_time_remaining = lambda i: 30
|
||||
api._pm.parent_seed = "seed"
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.get("/api/v1/totp", headers=headers)
|
||||
api.app.state.pm.entry_manager.list_entries = lambda **kw: [
|
||||
(0, "Email", None, None, False)
|
||||
]
|
||||
api.app.state.pm.entry_manager.get_totp_code = lambda i, s: "123456"
|
||||
api.app.state.pm.entry_manager.get_totp_time_remaining = lambda i: 30
|
||||
api.app.state.pm.parent_seed = "seed"
|
||||
headers = {"Authorization": f"Bearer {token}", "X-SeedPass-Password": "pw"}
|
||||
res = await cl.get("/api/v1/totp", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {
|
||||
"codes": [
|
||||
@@ -155,49 +166,39 @@ def test_totp_codes_endpoint(client):
|
||||
}
|
||||
|
||||
|
||||
def test_parent_seed_endpoint(client, tmp_path):
|
||||
@pytest.mark.anyio
|
||||
async def test_parent_seed_endpoint_removed(client):
|
||||
cl, token = client
|
||||
api._pm.parent_seed = "seed"
|
||||
called = {}
|
||||
api._pm.encryption_manager = SimpleNamespace(
|
||||
encrypt_and_save_file=lambda data, path: called.setdefault("path", path)
|
||||
res = await cl.get(
|
||||
"/api/v1/parent-seed", headers={"Authorization": f"Bearer {token}"}
|
||||
)
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
res = cl.get("/api/v1/parent-seed", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"seed": "seed"}
|
||||
|
||||
out = tmp_path / "bk.enc"
|
||||
res = cl.get("/api/v1/parent-seed", params={"file": str(out)}, headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "saved", "path": str(out)}
|
||||
assert called["path"] == out
|
||||
assert res.status_code == 404
|
||||
|
||||
|
||||
def test_fingerprint_endpoints(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_fingerprint_endpoints(client):
|
||||
cl, token = client
|
||||
calls = {}
|
||||
|
||||
api._pm.add_new_fingerprint = lambda: calls.setdefault("add", True)
|
||||
api._pm.fingerprint_manager.remove_fingerprint = lambda fp: calls.setdefault(
|
||||
"remove", fp
|
||||
api.app.state.pm.add_new_fingerprint = lambda: calls.setdefault("add", True)
|
||||
api.app.state.pm.fingerprint_manager.remove_fingerprint = (
|
||||
lambda fp: calls.setdefault("remove", fp)
|
||||
)
|
||||
api._pm.select_fingerprint = lambda fp: calls.setdefault("select", fp)
|
||||
api.app.state.pm.select_fingerprint = lambda fp: calls.setdefault("select", fp)
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
res = cl.post("/api/v1/fingerprint", headers=headers)
|
||||
res = await cl.post("/api/v1/fingerprint", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "ok"}
|
||||
assert calls.get("add") is True
|
||||
|
||||
res = cl.delete("/api/v1/fingerprint/abc", headers=headers)
|
||||
res = await cl.delete("/api/v1/fingerprint/abc", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "deleted"}
|
||||
assert calls.get("remove") == "abc"
|
||||
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/fingerprint/select",
|
||||
json={"fingerprint": "xyz"},
|
||||
headers=headers,
|
||||
@@ -207,40 +208,47 @@ def test_fingerprint_endpoints(client):
|
||||
assert calls.get("select") == "xyz"
|
||||
|
||||
|
||||
def test_checksum_endpoints(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_checksum_endpoints(client):
|
||||
cl, token = client
|
||||
calls = {}
|
||||
|
||||
api._pm.handle_verify_checksum = lambda: calls.setdefault("verify", True)
|
||||
api._pm.handle_update_script_checksum = lambda: calls.setdefault("update", True)
|
||||
api.app.state.pm.handle_verify_checksum = lambda: calls.setdefault("verify", True)
|
||||
api.app.state.pm.handle_update_script_checksum = lambda: calls.setdefault(
|
||||
"update", True
|
||||
)
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
res = cl.post("/api/v1/checksum/verify", headers=headers)
|
||||
res = await cl.post("/api/v1/checksum/verify", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "ok"}
|
||||
assert calls.get("verify") is True
|
||||
|
||||
res = cl.post("/api/v1/checksum/update", headers=headers)
|
||||
res = await cl.post("/api/v1/checksum/update", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "ok"}
|
||||
assert calls.get("update") is True
|
||||
|
||||
|
||||
def test_vault_import_via_path(client, tmp_path):
|
||||
@pytest.mark.anyio
|
||||
async def test_vault_import_via_path(client, tmp_path):
|
||||
cl, token = client
|
||||
called = {}
|
||||
|
||||
def import_db(path):
|
||||
called["path"] = path
|
||||
|
||||
api._pm.handle_import_database = import_db
|
||||
api._pm.sync_vault = lambda: called.setdefault("sync", True)
|
||||
file_path = tmp_path / "b.json"
|
||||
api.app.state.pm.handle_import_database = import_db
|
||||
api.app.state.pm.sync_vault = lambda: called.setdefault("sync", True)
|
||||
api.app.state.pm.encryption_manager = SimpleNamespace(
|
||||
resolve_relative_path=lambda p: p
|
||||
)
|
||||
file_path = tmp_path / "b.json.enc"
|
||||
file_path.write_text("{}")
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/vault/import",
|
||||
json={"path": str(file_path)},
|
||||
headers=headers,
|
||||
@@ -251,21 +259,22 @@ def test_vault_import_via_path(client, tmp_path):
|
||||
assert called.get("sync") is True
|
||||
|
||||
|
||||
def test_vault_import_via_upload(client, tmp_path):
|
||||
@pytest.mark.anyio
|
||||
async def test_vault_import_via_upload(client, tmp_path):
|
||||
cl, token = client
|
||||
called = {}
|
||||
|
||||
def import_db(path):
|
||||
called["path"] = path
|
||||
|
||||
api._pm.handle_import_database = import_db
|
||||
api._pm.sync_vault = lambda: called.setdefault("sync", True)
|
||||
api.app.state.pm.handle_import_database = import_db
|
||||
api.app.state.pm.sync_vault = lambda: called.setdefault("sync", True)
|
||||
file_path = tmp_path / "c.json"
|
||||
file_path.write_text("{}")
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
with open(file_path, "rb") as fh:
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/vault/import",
|
||||
files={"file": ("c.json", fh.read())},
|
||||
headers=headers,
|
||||
@@ -276,29 +285,68 @@ def test_vault_import_via_upload(client, tmp_path):
|
||||
assert called.get("sync") is True
|
||||
|
||||
|
||||
def test_vault_lock_endpoint(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_vault_import_invalid_extension(client):
|
||||
cl, token = client
|
||||
api.app.state.pm.handle_import_database = lambda path: None
|
||||
api.app.state.pm.sync_vault = lambda: None
|
||||
api.app.state.pm.encryption_manager = SimpleNamespace(
|
||||
resolve_relative_path=lambda p: p
|
||||
)
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = await cl.post(
|
||||
"/api/v1/vault/import",
|
||||
json={"path": "bad.txt"},
|
||||
headers=headers,
|
||||
)
|
||||
assert res.status_code == 400
|
||||
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_vault_import_path_traversal_blocked(client, tmp_path):
|
||||
cl, token = client
|
||||
key = base64.urlsafe_b64encode(os.urandom(32))
|
||||
api.app.state.pm.encryption_manager = EncryptionManager(key, tmp_path)
|
||||
api.app.state.pm.handle_import_database = lambda path: None
|
||||
api.app.state.pm.sync_vault = lambda: None
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = await cl.post(
|
||||
"/api/v1/vault/import",
|
||||
json={"path": "../evil.json.enc"},
|
||||
headers=headers,
|
||||
)
|
||||
assert res.status_code == 400
|
||||
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_vault_lock_endpoint(client):
|
||||
cl, token = client
|
||||
called = {}
|
||||
|
||||
def lock():
|
||||
called["locked"] = True
|
||||
api._pm.locked = True
|
||||
api.app.state.pm.locked = True
|
||||
|
||||
api._pm.lock_vault = lock
|
||||
api._pm.locked = False
|
||||
api.app.state.pm.lock_vault = lock
|
||||
api.app.state.pm.locked = False
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.post("/api/v1/vault/lock", headers=headers)
|
||||
res = await cl.post("/api/v1/vault/lock", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "locked"}
|
||||
assert called.get("locked") is True
|
||||
assert api._pm.locked is True
|
||||
api._pm.unlock_vault = lambda pw: setattr(api._pm, "locked", False)
|
||||
api._pm.unlock_vault("pw")
|
||||
assert api._pm.locked is False
|
||||
assert api.app.state.pm.locked is True
|
||||
api.app.state.pm.unlock_vault = lambda pw: setattr(
|
||||
api.app.state.pm, "locked", False
|
||||
)
|
||||
api.app.state.pm.unlock_vault("pw")
|
||||
assert api.app.state.pm.locked is False
|
||||
|
||||
|
||||
def test_secret_mode_endpoint(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_secret_mode_endpoint(client):
|
||||
cl, token = client
|
||||
called = {}
|
||||
|
||||
@@ -308,11 +356,11 @@ def test_secret_mode_endpoint(client):
|
||||
def set_delay(val):
|
||||
called.setdefault("delay", val)
|
||||
|
||||
api._pm.config_manager.set_secret_mode_enabled = set_secret
|
||||
api._pm.config_manager.set_clipboard_clear_delay = set_delay
|
||||
api.app.state.pm.config_manager.set_secret_mode_enabled = set_secret
|
||||
api.app.state.pm.config_manager.set_clipboard_clear_delay = set_delay
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/secret-mode",
|
||||
json={"enabled": True, "delay": 12},
|
||||
headers=headers,
|
||||
@@ -323,40 +371,79 @@ def test_secret_mode_endpoint(client):
|
||||
assert called["delay"] == 12
|
||||
|
||||
|
||||
def test_vault_export_endpoint(client, tmp_path):
|
||||
@pytest.mark.anyio
|
||||
async def test_vault_export_endpoint(client, tmp_path):
|
||||
cl, token = client
|
||||
out = tmp_path / "out.json"
|
||||
out.write_text("data")
|
||||
|
||||
api._pm.handle_export_database = lambda: out
|
||||
api.app.state.pm.handle_export_database = lambda: out
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.post("/api/v1/vault/export", headers=headers)
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"X-SeedPass-Password": "pw",
|
||||
}
|
||||
res = await cl.post("/api/v1/vault/export", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.content == b"data"
|
||||
|
||||
res = await cl.post(
|
||||
"/api/v1/vault/export", headers={"Authorization": f"Bearer {token}"}
|
||||
)
|
||||
assert res.status_code == 401
|
||||
|
||||
def test_backup_parent_seed_endpoint(client, tmp_path):
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_backup_parent_seed_endpoint(client, tmp_path):
|
||||
cl, token = client
|
||||
api.app.state.pm.parent_seed = "seed"
|
||||
called = {}
|
||||
api.app.state.pm.encryption_manager = SimpleNamespace(
|
||||
encrypt_and_save_file=lambda data, path: called.setdefault("path", path),
|
||||
resolve_relative_path=lambda p: p,
|
||||
)
|
||||
path = Path("seed.enc")
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"X-SeedPass-Password": "pw",
|
||||
}
|
||||
res = await cl.post(
|
||||
"/api/v1/vault/backup-parent-seed",
|
||||
json={"path": str(path), "confirm": True},
|
||||
headers=headers,
|
||||
)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "saved", "path": str(path)}
|
||||
assert called["path"] == path
|
||||
|
||||
def backup(path=None):
|
||||
called["path"] = path
|
||||
|
||||
api._pm.handle_backup_reveal_parent_seed = backup
|
||||
path = tmp_path / "seed.enc"
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/vault/backup-parent-seed",
|
||||
json={"path": str(path)},
|
||||
headers=headers,
|
||||
)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"status": "ok"}
|
||||
assert called["path"] == path
|
||||
assert res.status_code == 400
|
||||
|
||||
|
||||
def test_relay_management_endpoints(client, dummy_nostr_client, monkeypatch):
|
||||
@pytest.mark.anyio
|
||||
async def test_backup_parent_seed_path_traversal_blocked(client, tmp_path):
|
||||
cl, token = client
|
||||
api.app.state.pm.parent_seed = "seed"
|
||||
key = base64.urlsafe_b64encode(os.urandom(32))
|
||||
api.app.state.pm.encryption_manager = EncryptionManager(key, tmp_path)
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"X-SeedPass-Password": "pw",
|
||||
}
|
||||
res = await cl.post(
|
||||
"/api/v1/vault/backup-parent-seed",
|
||||
json={"path": "../evil.enc", "confirm": True},
|
||||
headers=headers,
|
||||
)
|
||||
assert res.status_code == 400
|
||||
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_relay_management_endpoints(client, dummy_nostr_client, monkeypatch):
|
||||
cl, token = client
|
||||
nostr_client, _ = dummy_nostr_client
|
||||
relays = ["wss://a", "wss://b"]
|
||||
@@ -369,8 +456,8 @@ def test_relay_management_endpoints(client, dummy_nostr_client, monkeypatch):
|
||||
def set_relays(new, require_pin=False):
|
||||
called["set"] = new
|
||||
|
||||
api._pm.config_manager.load_config = load_config
|
||||
api._pm.config_manager.set_relays = set_relays
|
||||
api.app.state.pm.config_manager.load_config = load_config
|
||||
api.app.state.pm.config_manager.set_relays = set_relays
|
||||
monkeypatch.setattr(
|
||||
NostrClient,
|
||||
"initialize_client_pool",
|
||||
@@ -379,33 +466,34 @@ def test_relay_management_endpoints(client, dummy_nostr_client, monkeypatch):
|
||||
monkeypatch.setattr(
|
||||
nostr_client, "close_client_pool", lambda: called.setdefault("close", True)
|
||||
)
|
||||
api._pm.nostr_client = nostr_client
|
||||
api._pm.nostr_client.relays = relays.copy()
|
||||
api.app.state.pm.nostr_client = nostr_client
|
||||
api.app.state.pm.nostr_client.relays = relays.copy()
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
res = cl.get("/api/v1/relays", headers=headers)
|
||||
res = await cl.get("/api/v1/relays", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == {"relays": relays}
|
||||
|
||||
res = cl.post("/api/v1/relays", json={"url": "wss://c"}, headers=headers)
|
||||
res = await cl.post("/api/v1/relays", json={"url": "wss://c"}, headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert called["set"] == ["wss://a", "wss://b", "wss://c"]
|
||||
|
||||
api._pm.config_manager.load_config = lambda require_pin=False: {
|
||||
api.app.state.pm.config_manager.load_config = lambda require_pin=False: {
|
||||
"relays": ["wss://a", "wss://b", "wss://c"]
|
||||
}
|
||||
res = cl.delete("/api/v1/relays/2", headers=headers)
|
||||
res = await cl.delete("/api/v1/relays/2", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert called["set"] == ["wss://a", "wss://c"]
|
||||
|
||||
res = cl.post("/api/v1/relays/reset", headers=headers)
|
||||
res = await cl.post("/api/v1/relays/reset", headers=headers)
|
||||
assert res.status_code == 200
|
||||
assert called.get("init") is True
|
||||
assert api._pm.nostr_client.relays == list(DEFAULT_RELAYS)
|
||||
assert api.app.state.pm.nostr_client.relays == list(DEFAULT_RELAYS)
|
||||
|
||||
|
||||
def test_generate_password_no_special_chars(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_generate_password_no_special_chars(client):
|
||||
cl, token = client
|
||||
|
||||
class DummyEnc:
|
||||
@@ -413,14 +501,18 @@ def test_generate_password_no_special_chars(client):
|
||||
return b"\x00" * 32
|
||||
|
||||
class DummyBIP85:
|
||||
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
|
||||
return bytes(range(bytes_len))
|
||||
def derive_entropy(
|
||||
self, index: int, entropy_bytes: int, app_no: int = 32
|
||||
) -> bytes:
|
||||
return bytes(range(entropy_bytes))
|
||||
|
||||
api._pm.password_generator = PasswordGenerator(DummyEnc(), "seed", DummyBIP85())
|
||||
api._pm.parent_seed = "seed"
|
||||
api.app.state.pm.password_generator = PasswordGenerator(
|
||||
DummyEnc(), "seed", DummyBIP85()
|
||||
)
|
||||
api.app.state.pm.parent_seed = "seed"
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/password",
|
||||
json={"length": 16, "include_special_chars": False},
|
||||
headers=headers,
|
||||
@@ -430,7 +522,8 @@ def test_generate_password_no_special_chars(client):
|
||||
assert not any(c in string.punctuation for c in pw)
|
||||
|
||||
|
||||
def test_generate_password_allowed_chars(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_generate_password_allowed_chars(client):
|
||||
cl, token = client
|
||||
|
||||
class DummyEnc:
|
||||
@@ -438,15 +531,19 @@ def test_generate_password_allowed_chars(client):
|
||||
return b"\x00" * 32
|
||||
|
||||
class DummyBIP85:
|
||||
def derive_entropy(self, index: int, bytes_len: int, app_no: int = 32) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(bytes_len))
|
||||
def derive_entropy(
|
||||
self, index: int, entropy_bytes: int, app_no: int = 32
|
||||
) -> bytes:
|
||||
return bytes((index + i) % 256 for i in range(entropy_bytes))
|
||||
|
||||
api._pm.password_generator = PasswordGenerator(DummyEnc(), "seed", DummyBIP85())
|
||||
api._pm.parent_seed = "seed"
|
||||
api.app.state.pm.password_generator = PasswordGenerator(
|
||||
DummyEnc(), "seed", DummyBIP85()
|
||||
)
|
||||
api.app.state.pm.parent_seed = "seed"
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
allowed = "@$"
|
||||
res = cl.post(
|
||||
res = await cl.post(
|
||||
"/api/v1/password",
|
||||
json={"length": 16, "allowed_special_chars": allowed},
|
||||
headers=headers,
|
||||
|
@@ -1,45 +1,59 @@
|
||||
from test_api import client
|
||||
from types import SimpleNamespace
|
||||
import queue
|
||||
import pytest
|
||||
import seedpass.api as api
|
||||
|
||||
|
||||
def test_notifications_endpoint(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_notifications_endpoint(client):
|
||||
cl, token = client
|
||||
api._pm.notifications = queue.Queue()
|
||||
api._pm.notifications.put(SimpleNamespace(message="m1", level="INFO"))
|
||||
api._pm.notifications.put(SimpleNamespace(message="m2", level="WARNING"))
|
||||
res = cl.get("/api/v1/notifications", headers={"Authorization": f"Bearer {token}"})
|
||||
api.app.state.pm.notifications = queue.Queue()
|
||||
api.app.state.pm.notifications.put(SimpleNamespace(message="m1", level="INFO"))
|
||||
api.app.state.pm.notifications.put(SimpleNamespace(message="m2", level="WARNING"))
|
||||
res = await cl.get(
|
||||
"/api/v1/notifications", headers={"Authorization": f"Bearer {token}"}
|
||||
)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == [
|
||||
{"level": "INFO", "message": "m1"},
|
||||
{"level": "WARNING", "message": "m2"},
|
||||
]
|
||||
assert api._pm.notifications.empty()
|
||||
assert api.app.state.pm.notifications.empty()
|
||||
|
||||
|
||||
def test_notifications_endpoint_clears_queue(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_notifications_endpoint_clears_queue(client):
|
||||
cl, token = client
|
||||
api._pm.notifications = queue.Queue()
|
||||
api._pm.notifications.put(SimpleNamespace(message="hi", level="INFO"))
|
||||
res = cl.get("/api/v1/notifications", headers={"Authorization": f"Bearer {token}"})
|
||||
api.app.state.pm.notifications = queue.Queue()
|
||||
api.app.state.pm.notifications.put(SimpleNamespace(message="hi", level="INFO"))
|
||||
res = await cl.get(
|
||||
"/api/v1/notifications", headers={"Authorization": f"Bearer {token}"}
|
||||
)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == [{"level": "INFO", "message": "hi"}]
|
||||
assert api._pm.notifications.empty()
|
||||
res = cl.get("/api/v1/notifications", headers={"Authorization": f"Bearer {token}"})
|
||||
assert api.app.state.pm.notifications.empty()
|
||||
res = await cl.get(
|
||||
"/api/v1/notifications", headers={"Authorization": f"Bearer {token}"}
|
||||
)
|
||||
assert res.json() == []
|
||||
|
||||
|
||||
def test_notifications_endpoint_does_not_clear_current(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_notifications_endpoint_does_not_clear_current(client):
|
||||
cl, token = client
|
||||
api._pm.notifications = queue.Queue()
|
||||
api.app.state.pm.notifications = queue.Queue()
|
||||
msg = SimpleNamespace(message="keep", level="INFO")
|
||||
api._pm.notifications.put(msg)
|
||||
api._pm._current_notification = msg
|
||||
api._pm.get_current_notification = lambda: api._pm._current_notification
|
||||
api.app.state.pm.notifications.put(msg)
|
||||
api.app.state.pm._current_notification = msg
|
||||
api.app.state.pm.get_current_notification = (
|
||||
lambda: api.app.state.pm._current_notification
|
||||
)
|
||||
|
||||
res = cl.get("/api/v1/notifications", headers={"Authorization": f"Bearer {token}"})
|
||||
res = await cl.get(
|
||||
"/api/v1/notifications", headers={"Authorization": f"Bearer {token}"}
|
||||
)
|
||||
assert res.status_code == 200
|
||||
assert res.json() == [{"level": "INFO", "message": "keep"}]
|
||||
assert api._pm.notifications.empty()
|
||||
assert api._pm.get_current_notification() is msg
|
||||
assert api.app.state.pm.notifications.empty()
|
||||
assert api.app.state.pm.get_current_notification() is msg
|
||||
|
@@ -1,13 +1,14 @@
|
||||
from test_api import client
|
||||
import pytest
|
||||
|
||||
|
||||
def test_profile_stats_endpoint(client):
|
||||
@pytest.mark.anyio
|
||||
async def test_profile_stats_endpoint(client):
|
||||
cl, token = client
|
||||
stats = {"total_entries": 1}
|
||||
# monkeypatch set _pm.get_profile_stats after client fixture started
|
||||
import seedpass.api as api
|
||||
|
||||
api._pm.get_profile_stats = lambda: stats
|
||||
res = cl.get("/api/v1/stats", headers={"Authorization": f"Bearer {token}"})
|
||||
api.app.state.pm.get_profile_stats = lambda: stats
|
||||
res = await cl.get("/api/v1/stats", headers={"Authorization": f"Bearer {token}"})
|
||||
assert res.status_code == 200
|
||||
assert res.json() == stats
|
||||
|
47
src/tests/test_api_rate_limit.py
Normal file
47
src/tests/test_api_rate_limit.py
Normal file
@@ -0,0 +1,47 @@
|
||||
import importlib
|
||||
from pathlib import Path
|
||||
from types import SimpleNamespace
|
||||
import importlib
|
||||
import pytest
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
|
||||
import sys
|
||||
|
||||
sys.path.append(str(Path(__file__).resolve().parents[1]))
|
||||
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_rate_limit_exceeded(monkeypatch):
|
||||
monkeypatch.setenv("SEEDPASS_RATE_LIMIT", "2")
|
||||
monkeypatch.setenv("SEEDPASS_RATE_WINDOW", "60")
|
||||
import seedpass.api as api
|
||||
|
||||
importlib.reload(api)
|
||||
|
||||
dummy = SimpleNamespace(
|
||||
entry_manager=SimpleNamespace(
|
||||
search_entries=lambda q: [
|
||||
(1, "Site", "user", "url", False, SimpleNamespace(value="password"))
|
||||
]
|
||||
),
|
||||
config_manager=SimpleNamespace(load_config=lambda require_pin=False: {}),
|
||||
fingerprint_manager=SimpleNamespace(list_fingerprints=lambda: []),
|
||||
nostr_client=SimpleNamespace(
|
||||
key_manager=SimpleNamespace(get_npub=lambda: "np")
|
||||
),
|
||||
verify_password=lambda pw: True,
|
||||
)
|
||||
monkeypatch.setattr(api, "PasswordManager", lambda: dummy)
|
||||
token = api.start_server()
|
||||
transport = ASGITransport(app=api.app)
|
||||
async with AsyncClient(transport=transport, base_url="http://test") as client:
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
for _ in range(2):
|
||||
res = await client.get(
|
||||
"/api/v1/entry", params={"query": "s"}, headers=headers
|
||||
)
|
||||
assert res.status_code == 200
|
||||
|
||||
res = await client.get("/api/v1/entry", params={"query": "s"}, headers=headers)
|
||||
assert res.status_code == 429
|
29
src/tests/test_api_reload_relays.py
Normal file
29
src/tests/test_api_reload_relays.py
Normal file
@@ -0,0 +1,29 @@
|
||||
import logging
|
||||
from types import SimpleNamespace
|
||||
|
||||
from seedpass import api
|
||||
|
||||
|
||||
def test_reload_relays_logs_errors(caplog):
|
||||
def close():
|
||||
raise RuntimeError("close fail")
|
||||
|
||||
def init():
|
||||
raise OSError("init fail")
|
||||
|
||||
pm = SimpleNamespace(
|
||||
nostr_client=SimpleNamespace(
|
||||
close_client_pool=close,
|
||||
initialize_client_pool=init,
|
||||
relays=[],
|
||||
)
|
||||
)
|
||||
request = SimpleNamespace(app=SimpleNamespace(state=SimpleNamespace(pm=pm)))
|
||||
|
||||
with caplog.at_level(logging.WARNING):
|
||||
api._reload_relays(request, ["ws://relay"])
|
||||
|
||||
assert "Failed to close NostrClient pool" in caplog.text
|
||||
assert "close fail" in caplog.text
|
||||
assert "Failed to initialize NostrClient with relays" in caplog.text
|
||||
assert "init fail" in caplog.text
|
30
src/tests/test_atomic_write.py
Normal file
30
src/tests/test_atomic_write.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import json
|
||||
from multiprocessing import Process
|
||||
from pathlib import Path
|
||||
|
||||
from utils.atomic_write import atomic_write
|
||||
|
||||
|
||||
def _writer(path: Path, content: dict, loops: int) -> None:
|
||||
for _ in range(loops):
|
||||
atomic_write(path, lambda f: json.dump(content, f), mode="w")
|
||||
|
||||
|
||||
def test_atomic_write_concurrent(tmp_path: Path) -> None:
|
||||
"""Concurrent writers should not leave partial files."""
|
||||
|
||||
file_path = tmp_path / "data.json"
|
||||
contents = [{"proc": i} for i in range(5)]
|
||||
|
||||
procs = [
|
||||
Process(target=_writer, args=(file_path, content, 50)) for content in contents
|
||||
]
|
||||
|
||||
for p in procs:
|
||||
p.start()
|
||||
for p in procs:
|
||||
p.join()
|
||||
|
||||
final_text = file_path.read_text()
|
||||
final_obj = json.loads(final_text)
|
||||
assert final_obj in contents
|
84
src/tests/test_audit_logger.py
Normal file
84
src/tests/test_audit_logger.py
Normal file
@@ -0,0 +1,84 @@
|
||||
import json
|
||||
import hashlib
|
||||
import hmac
|
||||
import queue
|
||||
from pathlib import Path
|
||||
from types import SimpleNamespace
|
||||
|
||||
import importlib
|
||||
import pytest
|
||||
|
||||
from seedpass.core.manager import PasswordManager, AuditLogger
|
||||
import seedpass.core.manager as manager_module
|
||||
|
||||
|
||||
def test_audit_logger_records_events(monkeypatch, tmp_path):
|
||||
monkeypatch.setattr(Path, "home", lambda: tmp_path)
|
||||
|
||||
pm = PasswordManager.__new__(PasswordManager)
|
||||
pm.fingerprint_dir = tmp_path
|
||||
pm.current_fingerprint = "user123"
|
||||
pm.profile_stack = []
|
||||
pm.setup_encryption_manager = lambda *a, **k: None
|
||||
pm.initialize_bip85 = lambda: None
|
||||
pm.initialize_managers = lambda: None
|
||||
pm.update_activity = lambda: None
|
||||
pm.verify_password = lambda pw: True
|
||||
pm.notifications = queue.Queue()
|
||||
pm.parent_seed = "seed phrase"
|
||||
pm.config_manager = SimpleNamespace(get_quick_unlock=lambda: True)
|
||||
|
||||
manager_module.clear_header_with_notification = lambda *a, **k: None
|
||||
|
||||
pm.unlock_vault(password="pw")
|
||||
|
||||
dest = tmp_path / "db.json.enc"
|
||||
monkeypatch.setattr(manager_module, "export_backup", lambda *a, **k: dest)
|
||||
pm.vault = object()
|
||||
pm.backup_manager = object()
|
||||
pm.handle_export_database(dest)
|
||||
|
||||
confirms = iter([True, False])
|
||||
monkeypatch.setattr(
|
||||
"seedpass.core.manager.confirm_action", lambda *_a, **_k: next(confirms)
|
||||
)
|
||||
pm.encryption_manager = SimpleNamespace(encrypt_and_save_file=lambda *a, **k: None)
|
||||
pm.handle_backup_reveal_parent_seed(password="pw")
|
||||
|
||||
log_path = tmp_path / ".seedpass" / "audit.log"
|
||||
lines = [json.loads(l) for l in log_path.read_text().splitlines()]
|
||||
events = [e["event"] for e in lines]
|
||||
assert "quick_unlock" in events
|
||||
assert "backup_export" in events
|
||||
assert "seed_reveal" in events
|
||||
|
||||
|
||||
def _verify_chain(path: Path, key: bytes) -> bool:
|
||||
prev = "0" * 64
|
||||
for line in path.read_text().splitlines():
|
||||
data = json.loads(line)
|
||||
sig = data.pop("sig")
|
||||
payload = json.dumps(data, sort_keys=True, separators=(",", ":"))
|
||||
expected = hmac.new(
|
||||
key, f"{prev}{payload}".encode(), hashlib.sha256
|
||||
).hexdigest()
|
||||
if sig != expected:
|
||||
return False
|
||||
prev = sig
|
||||
return True
|
||||
|
||||
|
||||
def test_audit_log_tamper_evident(monkeypatch, tmp_path):
|
||||
monkeypatch.setattr(Path, "home", lambda: tmp_path)
|
||||
key = hashlib.sha256(b"seed").digest()
|
||||
logger = AuditLogger(key)
|
||||
logger.log("one", {})
|
||||
logger.log("two", {})
|
||||
log_path = tmp_path / ".seedpass" / "audit.log"
|
||||
assert _verify_chain(log_path, key)
|
||||
lines = log_path.read_text().splitlines()
|
||||
tampered = json.loads(lines[0])
|
||||
tampered["event"] = "evil"
|
||||
lines[0] = json.dumps(tampered)
|
||||
log_path.write_text("\n".join(lines) + "\n")
|
||||
assert not _verify_chain(log_path, key)
|
@@ -15,6 +15,7 @@ def test_auto_sync_triggers_post(monkeypatch):
|
||||
is_dirty=True,
|
||||
last_update=time.time() - 0.2,
|
||||
last_activity=time.time(),
|
||||
current_fingerprint="fp",
|
||||
nostr_client=SimpleNamespace(close_client_pool=lambda: None),
|
||||
handle_add_password=lambda: None,
|
||||
handle_retrieve_entry=lambda: None,
|
||||
|
56
src/tests/test_background_error_reporting.py
Normal file
56
src/tests/test_background_error_reporting.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import logging
|
||||
import queue
|
||||
|
||||
import seedpass.core.manager as manager_module
|
||||
|
||||
|
||||
def _make_pm():
|
||||
pm = manager_module.PasswordManager.__new__(manager_module.PasswordManager)
|
||||
pm.offline_mode = False
|
||||
pm.notifications = queue.Queue()
|
||||
pm.error_queue = queue.Queue()
|
||||
pm.notify = lambda msg, level="INFO": pm.notifications.put(
|
||||
manager_module.Notification(msg, level)
|
||||
)
|
||||
pm.nostr_client = object()
|
||||
return pm
|
||||
|
||||
|
||||
def test_start_background_sync_error(monkeypatch, caplog):
|
||||
pm = _make_pm()
|
||||
|
||||
async def failing_sync(*_args, **_kwargs):
|
||||
raise RuntimeError("boom")
|
||||
|
||||
monkeypatch.setattr(pm, "attempt_initial_sync_async", failing_sync)
|
||||
monkeypatch.setattr(pm, "sync_index_from_nostr_async", failing_sync)
|
||||
|
||||
pm.start_background_sync()
|
||||
pm._sync_task.join(timeout=1)
|
||||
|
||||
with caplog.at_level(logging.WARNING):
|
||||
pm.poll_background_errors()
|
||||
|
||||
note = pm.notifications.get_nowait()
|
||||
assert "boom" in note.message
|
||||
assert "boom" in caplog.text
|
||||
|
||||
|
||||
def test_start_background_relay_check_error(monkeypatch, caplog):
|
||||
pm = _make_pm()
|
||||
|
||||
class DummyClient:
|
||||
def check_relay_health(self, *_args, **_kwargs):
|
||||
raise RuntimeError("relay boom")
|
||||
|
||||
pm.nostr_client = DummyClient()
|
||||
|
||||
pm.start_background_relay_check()
|
||||
pm._relay_thread.join(timeout=1)
|
||||
|
||||
with caplog.at_level(logging.WARNING):
|
||||
pm.poll_background_errors()
|
||||
|
||||
note = pm.notifications.get_nowait()
|
||||
assert "relay boom" in note.message
|
||||
assert "relay boom" in caplog.text
|
56
src/tests/test_backup_restore_startup.py
Normal file
56
src/tests/test_backup_restore_startup.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import main
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def test_cli_flag_restores_before_init(monkeypatch, tmp_path):
|
||||
calls = []
|
||||
backup = tmp_path / "bak.json"
|
||||
backup.write_text("{}")
|
||||
|
||||
def fake_restore(path, fingerprint):
|
||||
calls.append(("restore", Path(path), fingerprint))
|
||||
|
||||
class DummyPM:
|
||||
def __init__(self, fingerprint=None):
|
||||
calls.append(("init", fingerprint))
|
||||
self.secret_mode_enabled = True
|
||||
self.inactivity_timeout = 0
|
||||
|
||||
monkeypatch.setattr(main, "restore_backup_index", fake_restore)
|
||||
monkeypatch.setattr(main, "PasswordManager", DummyPM)
|
||||
monkeypatch.setattr(main, "display_menu", lambda pm, **k: None)
|
||||
|
||||
rc = main.main(["--fingerprint", "fp", "--restore-backup", str(backup)])
|
||||
assert rc == 0
|
||||
assert calls[0][0] == "restore"
|
||||
assert calls[1][0] == "init"
|
||||
assert calls[0][1] == backup
|
||||
assert calls[0][2] == "fp"
|
||||
|
||||
|
||||
def test_menu_option_restores_before_init(monkeypatch, tmp_path):
|
||||
calls = []
|
||||
backup = tmp_path / "bak.json"
|
||||
backup.write_text("{}")
|
||||
|
||||
def fake_restore(path, fingerprint):
|
||||
calls.append(("restore", Path(path), fingerprint))
|
||||
|
||||
class DummyPM:
|
||||
def __init__(self, fingerprint=None):
|
||||
calls.append(("init", fingerprint))
|
||||
self.secret_mode_enabled = True
|
||||
self.inactivity_timeout = 0
|
||||
|
||||
monkeypatch.setattr(main, "restore_backup_index", fake_restore)
|
||||
monkeypatch.setattr(main, "PasswordManager", DummyPM)
|
||||
monkeypatch.setattr(main, "display_menu", lambda pm, **k: None)
|
||||
inputs = iter(["2", str(backup)])
|
||||
monkeypatch.setattr("builtins.input", lambda _prompt="": next(inputs))
|
||||
|
||||
rc = main.main(["--fingerprint", "fp"])
|
||||
assert rc == 0
|
||||
assert calls[0][0] == "restore"
|
||||
assert calls[1][0] == "init"
|
||||
assert calls[0][1] == backup
|
||||
assert calls[0][2] == "fp"
|
52
src/tests/test_bip85_derivation_path.py
Normal file
52
src/tests/test_bip85_derivation_path.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from local_bip85.bip85 import BIP85
|
||||
|
||||
|
||||
class DummyChild:
    """Fluent stub standing in for a derived child key.

    Supports the chained call ``PrivateKey().Raw().ToBytes()`` used by the
    code under test, always yielding 32 zero bytes.
    """

    def PrivateKey(self):
        return self

    def Raw(self):
        return self

    def ToBytes(self):
        # bytes(32) is 32 zero bytes, identical to b"\x00" * 32.
        return bytes(32)
|
||||
|
||||
|
||||
class DummyCtx:
    """Stub BIP32 context that records the last derivation path requested."""

    def __init__(self):
        # No derivation has been requested yet.
        self.last_path = None

    def DerivePath(self, path: str):
        # Remember the path so tests can assert on it, then hand back a
        # fluent DummyChild for the PrivateKey().Raw().ToBytes() chain.
        self.last_path = path
        return DummyChild()
|
||||
|
||||
|
||||
def test_derivation_paths_for_entropy_lengths():
    """Each (entropy_bytes, word_count) pair must yield the expected BIP-85 path."""
    bip85 = BIP85(b"\x00" * 64)
    ctx = DummyCtx()
    # Swap in the recording context so no real derivation happens.
    bip85.bip32_ctx = ctx

    for entropy_bytes, word_count in [(16, 12), (24, 18), (32, 24)]:
        bip85.derive_entropy(
            index=0,
            entropy_bytes=entropy_bytes,
            app_no=39,
            word_count=word_count,
        )
        assert ctx.last_path == f"m/83696968'/39'/0'/{word_count}'/0'"
|
||||
|
||||
|
||||
def test_default_word_count_from_entropy_bytes():
    """Without an explicit word_count, entropy_bytes appears in the path."""
    bip85 = BIP85(b"\x00" * 64)
    recording_ctx = DummyCtx()
    bip85.bip32_ctx = recording_ctx

    bip85.derive_entropy(index=5, entropy_bytes=20, app_no=39)

    assert recording_ctx.last_path == "m/83696968'/39'/0'/20'/5'"
|
21
src/tests/test_bip85_init.py
Normal file
21
src/tests/test_bip85_init.py
Normal file
@@ -0,0 +1,21 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.append(str(Path(__file__).resolve().parents[1]))
|
||||
|
||||
from bip_utils import Bip39SeedGenerator
|
||||
from local_bip85.bip85 import BIP85
|
||||
from helpers import TEST_SEED
|
||||
|
||||
MASTER_XPRV = "xprv9s21ZrQH143K2LBWUUQRFXhucrQqBpKdRRxNVq2zBqsx8HVqFk2uYo8kmbaLLHRdqtQpUm98uKfu3vca1LqdGhUtyoFnCNkfmXRyPXLjbKb"
|
||||
|
||||
|
||||
def test_init_with_seed_bytes():
    """BIP85 must accept raw BIP-39 seed bytes."""
    seed = Bip39SeedGenerator(TEST_SEED).Generate()
    assert isinstance(BIP85(seed), BIP85)
|
||||
|
||||
|
||||
def test_init_with_xprv():
    """BIP85 must accept an extended private key (xprv) string."""
    assert isinstance(BIP85(MASTER_XPRV), BIP85)
|
44
src/tests/test_cli_clipboard_flag.py
Normal file
44
src/tests/test_cli_clipboard_flag.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from typer.testing import CliRunner
|
||||
|
||||
from seedpass.cli import app, entry as cli_entry
|
||||
from seedpass.core.entry_types import EntryType
|
||||
from utils.clipboard import ClipboardUnavailableError
|
||||
|
||||
|
||||
runner = CliRunner()
|
||||
|
||||
|
||||
def _stub_service(ctx, raise_error=True):
    """Build a fake entry service for the clipboard-flag tests.

    ``generate_password`` raises ClipboardUnavailableError unless the CLI
    was invoked with --no-clipboard (or *raise_error* is False).
    """

    class StubService:
        def search_entries(self, query, kinds=None):
            # One password entry, matching whatever the query was.
            return [(1, "label", None, None, False, EntryType.PASSWORD)]

        def retrieve_entry(self, idx):
            return {"type": EntryType.PASSWORD.value, "length": 12}

        def generate_password(self, length, index):
            # Equivalent (De Morgan) to: raise iff raise_error and clipboard
            # has NOT been disabled via the global --no-clipboard flag.
            if not raise_error or ctx.obj.get("no_clipboard"):
                return "pwd"
            raise ClipboardUnavailableError("missing")

    return StubService()
|
||||
|
||||
|
||||
def test_entry_get_handles_missing_clipboard(monkeypatch):
    """Without --no-clipboard, a clipboard failure exits 1 and mentions the flag."""
    monkeypatch.setattr(
        cli_entry, "_get_entry_service", lambda ctx: _stub_service(ctx, True)
    )

    outcome = runner.invoke(app, ["entry", "get", "label"], catch_exceptions=False)

    assert outcome.exit_code == 1
    assert "no-clipboard" in outcome.stderr.lower()
|
||||
|
||||
|
||||
def test_entry_get_no_clipboard_flag(monkeypatch):
    """With --no-clipboard, the password is printed instead of copied."""
    monkeypatch.setattr(
        cli_entry, "_get_entry_service", lambda ctx: _stub_service(ctx, True)
    )

    outcome = runner.invoke(
        app, ["--no-clipboard", "entry", "get", "label"], catch_exceptions=False
    )

    assert outcome.exit_code == 0
    assert "pwd" in outcome.stdout
|
@@ -3,7 +3,7 @@ from types import SimpleNamespace
|
||||
from typer.testing import CliRunner
|
||||
|
||||
from seedpass.cli import app
|
||||
from seedpass import cli
|
||||
from seedpass.cli import common as cli_common
|
||||
|
||||
runner = CliRunner()
|
||||
|
||||
@@ -39,7 +39,7 @@ def test_config_set_variants(monkeypatch, key, value, method, expected):
|
||||
config_manager=SimpleNamespace(**{method: func}),
|
||||
select_fingerprint=lambda fp: None,
|
||||
)
|
||||
monkeypatch.setattr(cli, "PasswordManager", lambda: pm)
|
||||
monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
|
||||
|
||||
result = runner.invoke(app, ["config", "set", key, value])
|
||||
|
||||
|
@@ -5,6 +5,7 @@ from typer.testing import CliRunner
|
||||
|
||||
from seedpass import cli
|
||||
from seedpass.cli import app
|
||||
from seedpass.cli import common as cli_common
|
||||
from seedpass.core.entry_types import EntryType
|
||||
|
||||
runner = CliRunner()
|
||||
@@ -18,7 +19,7 @@ def test_cli_vault_unlock(monkeypatch):
|
||||
return 0.5
|
||||
|
||||
pm = SimpleNamespace(unlock_vault=unlock_vault, select_fingerprint=lambda fp: None)
|
||||
monkeypatch.setattr(cli, "PasswordManager", lambda: pm)
|
||||
monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
|
||||
monkeypatch.setattr(cli.typer, "prompt", lambda *a, **k: "pw")
|
||||
result = runner.invoke(app, ["vault", "unlock"])
|
||||
assert result.exit_code == 0
|
||||
@@ -49,7 +50,7 @@ def test_cli_entry_add_search_sync(monkeypatch):
|
||||
sync_vault=lambda: {"manifest_id": "m", "chunk_ids": [], "delta_ids": []},
|
||||
select_fingerprint=lambda fp: None,
|
||||
)
|
||||
monkeypatch.setattr(cli, "PasswordManager", lambda: pm)
|
||||
monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
|
||||
|
||||
# entry add
|
||||
result = runner.invoke(app, ["entry", "add", "Label"])
|
||||
|
@@ -8,13 +8,15 @@ sys.path.append(str(Path(__file__).resolve().parents[1] / "src"))
|
||||
|
||||
from typer.testing import CliRunner
|
||||
from seedpass import cli
|
||||
from seedpass.cli import common as cli_common
|
||||
from seedpass.cli import api as cli_api
|
||||
from seedpass.core.entry_types import EntryType
|
||||
|
||||
|
||||
class DummyPM:
|
||||
def __init__(self):
|
||||
self.entry_manager = SimpleNamespace(
|
||||
list_entries=lambda sort_by="index", filter_kind=None, include_archived=False: [
|
||||
list_entries=lambda sort_by="index", filter_kinds=None, include_archived=False: [
|
||||
(1, "Label", "user", "url", False)
|
||||
],
|
||||
search_entries=lambda q, kinds=None: [
|
||||
@@ -97,9 +99,9 @@ runner = CliRunner()
|
||||
|
||||
|
||||
def _setup(monkeypatch):
|
||||
monkeypatch.setattr(cli, "PasswordManager", lambda: DummyPM())
|
||||
monkeypatch.setattr(cli.uvicorn, "run", lambda *a, **kw: None)
|
||||
monkeypatch.setattr(cli.api_module, "start_server", lambda fp: "token")
|
||||
monkeypatch.setattr(cli_common, "PasswordManager", lambda: DummyPM())
|
||||
monkeypatch.setattr(cli_api.uvicorn, "run", lambda *a, **kw: None)
|
||||
monkeypatch.setattr(cli_api.api_module, "start_server", lambda fp: "token")
|
||||
monkeypatch.setitem(
|
||||
sys.modules, "requests", SimpleNamespace(post=lambda *a, **kw: None)
|
||||
)
|
||||
|
@@ -3,7 +3,7 @@ from types import SimpleNamespace
|
||||
from typer.testing import CliRunner
|
||||
|
||||
from seedpass.cli import app
|
||||
from seedpass import cli
|
||||
from seedpass.cli import common as cli_common
|
||||
from helpers import TEST_SEED
|
||||
|
||||
runner = CliRunner()
|
||||
@@ -148,7 +148,7 @@ def test_entry_add_commands(
|
||||
select_fingerprint=lambda fp: None,
|
||||
start_background_vault_sync=start_background_vault_sync,
|
||||
)
|
||||
monkeypatch.setattr(cli, "PasswordManager", lambda: pm)
|
||||
monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
|
||||
result = runner.invoke(app, ["entry", command] + cli_args)
|
||||
assert result.exit_code == 0
|
||||
assert stdout in result.stdout
|
||||
|
@@ -1,11 +1,9 @@
|
||||
import importlib
|
||||
import shutil
|
||||
from contextlib import redirect_stdout
|
||||
from io import StringIO
|
||||
from pathlib import Path
|
||||
from types import SimpleNamespace
|
||||
|
||||
from tests.helpers import TEST_PASSWORD, TEST_SEED
|
||||
from helpers import TEST_PASSWORD, TEST_SEED
|
||||
|
||||
import colorama
|
||||
import constants
|
||||
@@ -58,31 +56,11 @@ def test_cli_integration(monkeypatch, tmp_path):
|
||||
monkeypatch.setattr(manager_module.PasswordManager, "add_new_fingerprint", auto_add)
|
||||
monkeypatch.setattr("builtins.input", lambda *a, **k: "1")
|
||||
|
||||
buf = StringIO()
|
||||
with redirect_stdout(buf):
|
||||
try:
|
||||
cli_module.app(["fingerprint", "add"])
|
||||
except SystemExit as e:
|
||||
assert e.code == 0
|
||||
buf.truncate(0)
|
||||
buf.seek(0)
|
||||
cli_module.app(["fingerprint", "add"], standalone_mode=False)
|
||||
|
||||
with redirect_stdout(buf):
|
||||
try:
|
||||
cli_module.app(["entry", "add", "Example", "--length", "8"])
|
||||
except SystemExit as e:
|
||||
assert e.code == 0
|
||||
buf.truncate(0)
|
||||
buf.seek(0)
|
||||
cli_module.app(["entry", "add", "Example", "--length", "8"], standalone_mode=False)
|
||||
|
||||
with redirect_stdout(buf):
|
||||
try:
|
||||
cli_module.app(["entry", "get", "Example"])
|
||||
except SystemExit as e:
|
||||
assert e.code == 0
|
||||
lines = [line for line in buf.getvalue().splitlines() if line.strip()]
|
||||
password = lines[-1]
|
||||
assert len(password.strip()) >= 8
|
||||
cli_module.app(["entry", "get", "Example"], standalone_mode=False)
|
||||
|
||||
fm = manager_module.FingerprintManager(constants.APP_DIR)
|
||||
fp = fm.current_fingerprint
|
||||
|
@@ -2,7 +2,7 @@ from types import SimpleNamespace
|
||||
from typer.testing import CliRunner
|
||||
|
||||
from seedpass.cli import app
|
||||
from seedpass import cli
|
||||
from seedpass.cli import common as cli_common
|
||||
|
||||
|
||||
class DummyService:
|
||||
@@ -37,8 +37,8 @@ def test_cli_relay_crud(monkeypatch):
|
||||
def pm_factory(*a, **k):
|
||||
return SimpleNamespace()
|
||||
|
||||
monkeypatch.setattr(cli, "PasswordManager", pm_factory)
|
||||
monkeypatch.setattr(cli, "NostrService", lambda pm: DummyService(relays))
|
||||
monkeypatch.setattr(cli_common, "PasswordManager", pm_factory)
|
||||
monkeypatch.setattr(cli_common, "NostrService", lambda pm: DummyService(relays))
|
||||
|
||||
result = runner.invoke(app, ["nostr", "list-relays"])
|
||||
assert "1: wss://a" in result.stdout
|
||||
|
@@ -60,7 +60,7 @@ def test_totp_command(monkeypatch, capsys):
|
||||
monkeypatch.setattr(main, "initialize_app", lambda: None)
|
||||
monkeypatch.setattr(main.signal, "signal", lambda *a, **k: None)
|
||||
monkeypatch.setattr(
|
||||
main, "copy_to_clipboard", lambda v, d: called.setdefault("val", v)
|
||||
main, "copy_to_clipboard", lambda v, d: (called.setdefault("val", v), True)[1]
|
||||
)
|
||||
rc = main.main(["totp", "ex"])
|
||||
assert rc == 0
|
||||
|
@@ -2,7 +2,7 @@ from types import SimpleNamespace
|
||||
from typer.testing import CliRunner
|
||||
|
||||
from seedpass.cli import app
|
||||
from seedpass import cli
|
||||
from seedpass.cli import common as cli_common
|
||||
|
||||
runner = CliRunner()
|
||||
|
||||
@@ -23,7 +23,7 @@ def _make_pm(called, enabled=False):
|
||||
def test_toggle_offline_updates(monkeypatch):
|
||||
called = {}
|
||||
pm = _make_pm(called)
|
||||
monkeypatch.setattr(cli, "PasswordManager", lambda: pm)
|
||||
monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
|
||||
result = runner.invoke(app, ["config", "toggle-offline"], input="y\n")
|
||||
assert result.exit_code == 0
|
||||
assert called == {"enabled": True}
|
||||
@@ -33,7 +33,7 @@ def test_toggle_offline_updates(monkeypatch):
|
||||
def test_toggle_offline_keep(monkeypatch):
|
||||
called = {}
|
||||
pm = _make_pm(called, enabled=True)
|
||||
monkeypatch.setattr(cli, "PasswordManager", lambda: pm)
|
||||
monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
|
||||
result = runner.invoke(app, ["config", "toggle-offline"], input="\n")
|
||||
assert result.exit_code == 0
|
||||
assert called == {"enabled": True}
|
||||
|
@@ -3,7 +3,7 @@ from types import SimpleNamespace
|
||||
from typer.testing import CliRunner
|
||||
|
||||
from seedpass.cli import app
|
||||
from seedpass import cli
|
||||
from seedpass.cli import common as cli_common
|
||||
|
||||
runner = CliRunner()
|
||||
|
||||
@@ -27,7 +27,7 @@ def _make_pm(called, enabled=False, delay=45):
|
||||
def test_toggle_secret_mode_updates(monkeypatch):
|
||||
called = {}
|
||||
pm = _make_pm(called)
|
||||
monkeypatch.setattr(cli, "PasswordManager", lambda: pm)
|
||||
monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
|
||||
result = runner.invoke(app, ["config", "toggle-secret-mode"], input="y\n10\n")
|
||||
assert result.exit_code == 0
|
||||
assert called == {"enabled": True, "delay": 10}
|
||||
@@ -37,7 +37,7 @@ def test_toggle_secret_mode_updates(monkeypatch):
|
||||
def test_toggle_secret_mode_keep(monkeypatch):
|
||||
called = {}
|
||||
pm = _make_pm(called, enabled=True, delay=30)
|
||||
monkeypatch.setattr(cli, "PasswordManager", lambda: pm)
|
||||
monkeypatch.setattr(cli_common, "PasswordManager", lambda: pm)
|
||||
result = runner.invoke(app, ["config", "toggle-secret-mode"], input="\n\n")
|
||||
assert result.exit_code == 0
|
||||
assert called == {"enabled": True, "delay": 30}
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user