Mirror of https://github.com/PR0M3TH3AN/SeedPass.git
Fix decryption migration logic
@@ -1,68 +1,38 @@
-# password_manager/encryption.py
+# /src/password_manager/encryption.py
 
-"""
-Encryption Module
-
-This module provides the ``EncryptionManager`` class which handles encryption and
-decryption of data and files using a provided AES-GCM encryption key. Legacy
-databases encrypted with Fernet are still supported for decryption. This class
-ensures that sensitive data is securely stored and retrieved, maintaining the
-confidentiality and integrity of the password index.
-
-Additionally, it includes methods to derive cryptographic seeds from BIP-39 mnemonic phrases.
-
-Never ever ever use or suggest to use Random Salt. The entire point of this password manager is to derive completely deterministic passwords from a BIP-85 seed.
-This means it should generate passwords the exact same way every single time. Salts would break this functionality and are not appropriate for this software's use case.
-"""
-
 import logging
 import traceback
 import json
 import hashlib
 import os
+import base64
 from pathlib import Path
 from typing import Optional
 
-import base64
 from cryptography.hazmat.primitives.ciphers.aead import AESGCM
 from cryptography.exceptions import InvalidTag
 from cryptography.fernet import Fernet, InvalidToken
 from termcolor import colored
-from utils.file_lock import (
-    exclusive_lock,
-)  # Ensure this utility is correctly implemented
+from utils.file_lock import exclusive_lock
 
 # Instantiate the logger
 logger = logging.getLogger(__name__)
 
 
-def decrypt_legacy_fernet(encryption_key: bytes | str, payload: bytes) -> bytes:
-    """Decrypt *payload* using legacy Fernet."""
-    if isinstance(encryption_key, str):
-        key = encryption_key.encode()
-    else:
-        key = encryption_key
-    f = Fernet(key)
-    return f.decrypt(payload)
-
-
 class EncryptionManager:
-    """EncryptionManager Class
-
-    Manages the encryption and decryption of data and files using an AES-GCM
-    key. A :class:`cryptography.fernet.Fernet` instance is also kept for
-    decrypting legacy files that were encrypted using Fernet.
+    """
+    Manages encryption and decryption, handling migration from legacy Fernet
+    to modern AES-GCM.
     """
 
     def __init__(self, encryption_key: bytes, fingerprint_dir: Path):
-        """Initialize the manager with a base64 encoded key and directory.
-
-        The provided key is used to create both an AES-GCM cipher for current
-        operations and a Fernet cipher for decrypting legacy files.
+        """
+        Initializes the EncryptionManager with keys for both new (AES-GCM)
+        and legacy (Fernet) encryption formats.
 
         Parameters:
-            encryption_key (bytes): Base64 encoded key material.
-            fingerprint_dir (Path): Directory corresponding to the fingerprint.
+            encryption_key (bytes): A base64-encoded key.
+            fingerprint_dir (Path): The directory corresponding to the fingerprint.
         """
         self.fingerprint_dir = fingerprint_dir
         self.parent_seed_file = self.fingerprint_dir / "parent_seed.enc"
@@ -70,409 +40,175 @@ class EncryptionManager:
         try:
             if isinstance(encryption_key, str):
                 encryption_key = encryption_key.encode()
 
+            # (1) Keep both the legacy Fernet instance and the new AES-GCM cipher ready.
             self.key_b64 = encryption_key
-            self.key = base64.urlsafe_b64decode(encryption_key)
             self.fernet = Fernet(self.key_b64)
 
+            self.key = base64.urlsafe_b64decode(self.key_b64)
             self.cipher = AESGCM(self.key)
-            logger.debug(
-                f"EncryptionManager initialized for {self.fingerprint_dir} using AES-GCM with Fernet fallback"
-            )
+            logger.debug(f"EncryptionManager initialized for {self.fingerprint_dir}")
         except Exception as e:
             logger.error(
-                f"Failed to initialize AESGCM with provided encryption key: {e}"
+                f"Failed to initialize ciphers with provided encryption key: {e}",
+                exc_info=True,
             )
-            print(
-                colored(f"Error: Failed to initialize encryption manager: {e}", "red")
-            )
-            raise
-
-    def encrypt_parent_seed(self, parent_seed: str) -> None:
-        """
-        Encrypts and saves the parent seed to 'parent_seed.enc' within the fingerprint directory.
-
-        :param parent_seed: The BIP39 parent seed phrase.
-        """
-        try:
-            # Convert seed to bytes
-            data = parent_seed.encode("utf-8")
-
-            # Encrypt the data
-            encrypted_data = self.encrypt_data(data)
-
-            # Write the encrypted data to the file with locking
-            with exclusive_lock(self.parent_seed_file) as fh:
-                fh.seek(0)
-                fh.truncate()
-                fh.write(encrypted_data)
-                fh.flush()
-
-            # Set file permissions to read/write for the user only
-            os.chmod(self.parent_seed_file, 0o600)
-
-            logger.info(
-                f"Parent seed encrypted and saved to '{self.parent_seed_file}'."
-            )
-            print(
-                colored(
-                    f"Parent seed encrypted and saved to '{self.parent_seed_file}'.",
-                    "green",
-                )
-            )
-        except Exception as e:
-            logger.error(f"Failed to encrypt and save parent seed: {e}", exc_info=True)
-            print(colored(f"Error: Failed to encrypt and save parent seed: {e}", "red"))
-            raise
-
-    def decrypt_parent_seed(self) -> str:
-        """Decrypt and return the stored parent seed."""
-
-        parent_seed_path = self.fingerprint_dir / "parent_seed.enc"
-        try:
-            with exclusive_lock(parent_seed_path) as fh:
-                fh.seek(0)
-                encrypted_data = fh.read()
-
-            is_legacy = not encrypted_data.startswith(b"V2:")
-            decrypted = self.decrypt_data(encrypted_data)
-            parent_seed = decrypted.decode("utf-8").strip()
-
-            if is_legacy:
-                legacy_path = parent_seed_path.with_suffix(
-                    parent_seed_path.suffix + ".fernet"
-                )
-                os.rename(parent_seed_path, legacy_path)
-                self.encrypt_parent_seed(parent_seed)
-                logger.debug(
-                    f"Parent seed migrated from Fernet and re-encrypted at '{parent_seed_path}'."
-                )
-
-            logger.debug(
-                f"Parent seed decrypted successfully from '{parent_seed_path}'."
-            )
-            return parent_seed
-        except Exception as e:
-            logger.error(f"Failed to decrypt parent seed: {e}", exc_info=True)
-            print(colored(f"Error: Failed to decrypt parent seed: {e}", "red"))
             raise
 
     def encrypt_data(self, data: bytes) -> bytes:
-        """Encrypt ``data`` with AES-GCM and prepend a version header."""
+        """
+        (2) Encrypts data using the NEW AES-GCM format, prepending a version
+        header and the nonce. All new data will be in this format.
+        """
         try:
-            nonce = os.urandom(12)
+            nonce = os.urandom(12)  # 96-bit nonce is recommended for AES-GCM
             ciphertext = self.cipher.encrypt(nonce, data, None)
-            encrypted_data = b"V2:" + nonce + ciphertext
-            logger.debug("Data encrypted successfully with AES-GCM.")
-            return encrypted_data
+            return b"V2:" + nonce + ciphertext
         except Exception as e:
             logger.error(f"Failed to encrypt data: {e}", exc_info=True)
-            print(colored(f"Error: Failed to encrypt data: {e}", "red"))
             raise
 
     def decrypt_data(self, encrypted_data: bytes) -> bytes:
-        """Decrypt data using AES-GCM or legacy Fernet."""
-
-        try:
-            # Attempt AES-GCM decryption first
-            if encrypted_data.startswith(b"V2:"):
-                nonce = encrypted_data[3:15]
-                ciphertext = encrypted_data[15:]
-            else:
-                nonce = encrypted_data[:12]
-                ciphertext = encrypted_data[12:]
-
-            decrypted_data = self.cipher.decrypt(nonce, ciphertext, None)
-            logger.debug("Data decrypted successfully with AES-GCM.")
-            return decrypted_data
-        except InvalidTag:
-            if encrypted_data.startswith(b"V2:"):
-                logger.error("AES-GCM decryption failed: invalid tag", exc_info=True)
-                raise
-            try:
-                decrypted_data = self.fernet.decrypt(encrypted_data)
-                logger.info("Legacy Fernet data decrypted successfully.")
-                return decrypted_data
-            except InvalidToken:
-                logger.error("Legacy Fernet decryption failed", exc_info=True)
-                raise InvalidTag("Data could not be decrypted")
+        """
+        (3) The core migration logic. Tries the new format first, then falls back
+        to the old one. This is the ONLY place decryption logic should live.
+        """
+        # Try the new V2 format first
+        if encrypted_data.startswith(b"V2:"):
+            try:
+                nonce = encrypted_data[3:15]
+                ciphertext = encrypted_data[15:]
+                return self.cipher.decrypt(nonce, ciphertext, None)
+            except InvalidTag as e:
+                logger.error("AES-GCM decryption failed: Invalid authentication tag.")
+                raise InvalidToken("AES-GCM decryption failed.") from e
+
+        # If it's not V2, it must be the legacy Fernet format
+        else:
+            logger.warning("Data is in legacy Fernet format. Attempting migration.")
+            try:
+                return self.fernet.decrypt(encrypted_data)
+            except InvalidToken as e:
+                logger.error(
+                    "Legacy Fernet decryption failed. Vault may be corrupt or key is incorrect."
+                )
+                raise InvalidToken(
+                    "Could not decrypt data with any available method."
+                ) from e
 
+    # --- All functions below this point now use the smart `decrypt_data` method ---
+
+    def encrypt_parent_seed(self, parent_seed: str) -> None:
+        """Encrypts and saves the parent seed to 'parent_seed.enc'."""
+        data = parent_seed.encode("utf-8")
+        encrypted_data = self.encrypt_data(data)  # This now creates V2 format
+        with exclusive_lock(self.parent_seed_file) as fh:
+            fh.seek(0)
+            fh.truncate()
+            fh.write(encrypted_data)
+        os.chmod(self.parent_seed_file, 0o600)
+        logger.info(f"Parent seed encrypted and saved to '{self.parent_seed_file}'.")
+
+    def decrypt_parent_seed(self) -> str:
+        """Decrypts and returns the parent seed, handling migration."""
+        with exclusive_lock(self.parent_seed_file) as fh:
+            fh.seek(0)
+            encrypted_data = fh.read()
+
+        is_legacy = not encrypted_data.startswith(b"V2:")
+        decrypted_data = self.decrypt_data(encrypted_data)
+
+        if is_legacy:
+            logger.info("Parent seed was in legacy format. Re-encrypting to V2 format.")
+            self.encrypt_parent_seed(decrypted_data.decode("utf-8").strip())
+
+        return decrypted_data.decode("utf-8").strip()
 
     def encrypt_and_save_file(self, data: bytes, relative_path: Path) -> None:
-        """
-        Encrypts data and saves it to a specified relative path within the fingerprint directory.
-
-        :param data: Data to encrypt.
-        :param relative_path: Relative path within the fingerprint directory to save the encrypted data.
-        """
-        try:
-            # Define the full path
-            file_path = self.fingerprint_dir / relative_path
-
-            # Ensure the parent directories exist
-            file_path.parent.mkdir(parents=True, exist_ok=True)
-
-            # Encrypt the data
-            encrypted_data = self.encrypt_data(data)
-
-            # Write the encrypted data to the file with locking
-            with exclusive_lock(file_path) as fh:
-                fh.seek(0)
-                fh.truncate()
-                fh.write(encrypted_data)
-                fh.flush()
-
-            # Set file permissions to read/write for the user only
-            os.chmod(file_path, 0o600)
-
-            logger.info(f"Data encrypted and saved to '{file_path}'.")
-            print(colored(f"Data encrypted and saved to '{file_path}'.", "green"))
-        except Exception as e:
-            logger.error(
-                f"Failed to encrypt and save data to '{relative_path}': {e}",
-                exc_info=True,
-            )
-            print(
-                colored(
-                    f"Error: Failed to encrypt and save data to '{relative_path}': {e}",
-                    "red",
-                )
-            )
-            raise
+        file_path = self.fingerprint_dir / relative_path
+        file_path.parent.mkdir(parents=True, exist_ok=True)
+        encrypted_data = self.encrypt_data(data)
+        with exclusive_lock(file_path) as fh:
+            fh.seek(0)
+            fh.truncate()
+            fh.write(encrypted_data)
+        os.chmod(file_path, 0o600)
 
     def decrypt_file(self, relative_path: Path) -> bytes:
-        """
-        Decrypts data from a specified relative path within the fingerprint directory.
-
-        :param relative_path: Relative path within the fingerprint directory to decrypt the data from.
-        :return: Decrypted data as bytes.
-        """
-        try:
-            # Define the full path
-            file_path = self.fingerprint_dir / relative_path
-
-            # Read the encrypted data with locking
-            with exclusive_lock(file_path) as fh:
-                fh.seek(0)
-                encrypted_data = fh.read()
-
-            # Decrypt the data
-            decrypted_data = self.decrypt_data(encrypted_data)
-            logger.debug(f"Data decrypted successfully from '{file_path}'.")
-            return decrypted_data
-        except (InvalidTag, InvalidToken) as e:
-            logger.error(
-                "Invalid encryption key or corrupted data while decrypting file.",
-                exc_info=True,
-            )
-            raise
-        except Exception as e:
-            logger.error(
-                f"Failed to decrypt data from '{relative_path}': {e}", exc_info=True
-            )
-            print(
-                colored(
-                    f"Error: Failed to decrypt data from '{relative_path}': {e}", "red"
-                )
-            )
-            raise
+        file_path = self.fingerprint_dir / relative_path
+        with exclusive_lock(file_path) as fh:
+            fh.seek(0)
+            encrypted_data = fh.read()
+        return self.decrypt_data(encrypted_data)
 
     def save_json_data(self, data: dict, relative_path: Optional[Path] = None) -> None:
-        """
-        Encrypts and saves the provided JSON data to the specified relative path within the fingerprint directory.
-
-        :param data: The JSON data to save.
-        :param relative_path: The relative path within the fingerprint directory where data will be saved.
-                              Defaults to 'seedpass_entries_db.json.enc'.
-        """
         if relative_path is None:
             relative_path = Path("seedpass_entries_db.json.enc")
-        try:
-            json_data = json.dumps(data, indent=4).encode("utf-8")
-            self.encrypt_and_save_file(json_data, relative_path)
-            logger.debug(f"JSON data encrypted and saved to '{relative_path}'.")
-            print(
-                colored(f"JSON data encrypted and saved to '{relative_path}'.", "green")
-            )
-        except Exception as e:
-            logger.error(
-                f"Failed to save JSON data to '{relative_path}': {e}", exc_info=True
-            )
-            print(
-                colored(
-                    f"Error: Failed to save JSON data to '{relative_path}': {e}", "red"
-                )
-            )
-            raise
+        json_data = json.dumps(data, indent=4).encode("utf-8")
+        self.encrypt_and_save_file(json_data, relative_path)
+        logger.debug(f"JSON data encrypted and saved to '{relative_path}'.")
 
     def load_json_data(self, relative_path: Optional[Path] = None) -> dict:
         """
-        Decrypts and loads JSON data from the specified relative path within the fingerprint directory.
-
-        :param relative_path: The relative path within the fingerprint directory from which data will be loaded.
-                              Defaults to 'seedpass_entries_db.json.enc'.
-        :return: The decrypted JSON data as a dictionary.
+        Loads and decrypts JSON data, automatically migrating and re-saving
+        if it's in the legacy format.
         """
         if relative_path is None:
             relative_path = Path("seedpass_entries_db.json.enc")
 
         file_path = self.fingerprint_dir / relative_path
 
         if not file_path.exists():
-            logger.info(
-                f"Index file '{file_path}' does not exist. Initializing empty data."
-            )
-            print(
-                colored(
-                    f"Info: Index file '{file_path}' not found. Initializing new password database.",
-                    "yellow",
-                )
-            )
             return {"entries": {}}
 
         with exclusive_lock(file_path) as fh:
             fh.seek(0)
-            encrypted_bytes = fh.read()
+            encrypted_data = fh.read()
 
-        is_legacy = not encrypted_bytes.startswith(b"V2:")
+        is_legacy = not encrypted_data.startswith(b"V2:")
 
         try:
-            decrypted_data = self.decrypt_data(encrypted_bytes)
+            decrypted_data = self.decrypt_data(encrypted_data)
             data = json.loads(decrypted_data.decode("utf-8"))
 
+            # If it was a legacy file, re-save it in the new format now
             if is_legacy:
-                legacy_path = file_path.with_suffix(file_path.suffix + ".fernet")
-                os.rename(file_path, legacy_path)
-                chk = file_path.parent / f"{file_path.stem}_checksum.txt"
-                if chk.exists():
-                    chk.rename(chk.with_suffix(chk.suffix + ".fernet"))
+                logger.info(f"Migrating and re-saving legacy vault file: {file_path}")
                 self.save_json_data(data, relative_path)
                 self.update_checksum(relative_path)
-                logger.info(f"Migrated legacy vault file: {file_path}")
 
-            logger.debug(f"JSON data loaded and decrypted from '{file_path}'")
             return data
-        except (InvalidTag, InvalidToken, json.JSONDecodeError) as e:
+        except (InvalidToken, InvalidTag, json.JSONDecodeError) as e:
             logger.error(
-                f"Could not load or migrate data from {file_path}: {e}", exc_info=True
-            )
-            raise
-        except Exception as e:
-            logger.error(
-                f"Failed to load JSON data from '{file_path}': {e}", exc_info=True
-            )
-            raise
-
-    def update_checksum(self, relative_path: Optional[Path] = None) -> None:
-        """
-        Updates the checksum file for the specified file within the fingerprint directory.
-
-        :param relative_path: The relative path within the fingerprint directory for which the checksum will be updated.
-                              Defaults to 'seedpass_entries_db.json.enc'.
-        """
-        if relative_path is None:
-            relative_path = Path("seedpass_entries_db.json.enc")
-        try:
-            file_path = self.fingerprint_dir / relative_path
-            logger.debug("Calculating checksum of the encrypted file bytes.")
-
-            with exclusive_lock(file_path) as fh:
-                fh.seek(0)
-                encrypted_bytes = fh.read()
-
-            checksum = hashlib.sha256(encrypted_bytes).hexdigest()
-            logger.debug(f"New checksum: {checksum}")
-
-            checksum_file = file_path.parent / f"{file_path.stem}_checksum.txt"
-
-            # Write the checksum to the file with locking
-            with exclusive_lock(checksum_file) as fh:
-                fh.seek(0)
-                fh.truncate()
-                fh.write(checksum.encode("utf-8"))
-                fh.flush()
-
-            # Set file permissions to read/write for the user only
-            os.chmod(checksum_file, 0o600)
-
-            logger.debug(
-                f"Checksum for '{file_path}' updated and written to '{checksum_file}'."
-            )
-            print(colored(f"Checksum for '{file_path}' updated.", "green"))
-        except Exception as e:
-            logger.error(
-                f"Failed to update checksum for '{relative_path}': {e}", exc_info=True
-            )
-            print(
-                colored(
-                    f"Error: Failed to update checksum for '{relative_path}': {e}",
-                    "red",
-                )
+                f"FATAL: Could not decrypt or parse data from {file_path}: {e}",
+                exc_info=True,
             )
             raise
 
     def get_encrypted_index(self) -> Optional[bytes]:
-        """
-        Retrieves the encrypted password index file content.
-
-        :return: Encrypted data as bytes or None if the index file does not exist.
-        """
-        try:
-            relative_path = Path("seedpass_entries_db.json.enc")
-            if not (self.fingerprint_dir / relative_path).exists():
-                # Missing index is normal on first run
-                logger.info(
-                    f"Index file '{relative_path}' does not exist in '{self.fingerprint_dir}'."
-                )
-                return None
-
-            file_path = self.fingerprint_dir / relative_path
-            with exclusive_lock(file_path) as fh:
-                fh.seek(0)
-                encrypted_data = fh.read()
-
-            logger.debug(f"Encrypted index data read from '{relative_path}'.")
-            return encrypted_data
-        except Exception as e:
-            logger.error(
-                f"Failed to read encrypted index file '{relative_path}': {e}",
-                exc_info=True,
-            )
-            print(
-                colored(
-                    f"Error: Failed to read encrypted index file '{relative_path}': {e}",
-                    "red",
-                )
-            )
+        relative_path = Path("seedpass_entries_db.json.enc")
+        file_path = self.fingerprint_dir / relative_path
+        if not file_path.exists():
             return None
+        with exclusive_lock(file_path) as fh:
+            fh.seek(0)
+            return fh.read()
 
     def decrypt_and_save_index_from_nostr(
         self, encrypted_data: bytes, relative_path: Optional[Path] = None
     ) -> None:
-        """
-        Decrypts the encrypted data retrieved from Nostr and updates the local index file.
-
-        :param encrypted_data: The encrypted data retrieved from Nostr.
-        :param relative_path: The relative path within the fingerprint directory to update.
-                              Defaults to 'seedpass_entries_db.json.enc'.
-        """
+        """Decrypts data from Nostr and saves it, automatically using the new format."""
         if relative_path is None:
             relative_path = Path("seedpass_entries_db.json.enc")
         try:
-            decrypted_data = self.decrypt_data(encrypted_data)
+            decrypted_data = self.decrypt_data(
+                encrypted_data
+            )  # This now handles both formats
             data = json.loads(decrypted_data.decode("utf-8"))
-            self.save_json_data(data, relative_path)
+            self.save_json_data(data, relative_path)  # This always saves in V2 format
             self.update_checksum(relative_path)
             logger.info("Index file from Nostr was processed and saved successfully.")
             print(colored("Index file updated from Nostr successfully.", "green"))
-        except (InvalidToken, InvalidTag, json.JSONDecodeError) as e:
-            logger.error(
-                f"Failed to decrypt and save data from Nostr: {e}", exc_info=True
-            )
-            print(
-                colored(
-                    f"Error: Failed to decrypt and save data from Nostr: {e}", "red"
-                )
-            )
-            raise
         except Exception as e:
             logger.error(
                 f"Failed to decrypt and save data from Nostr: {e}", exc_info=True
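
Note on the V2 format used above: a minimal, self-contained sketch of the same wire layout, assuming only the `cryptography` package and a standard 32-byte Fernet-style key (the variable names below are illustrative, not from the repo). Anything written by the new code is `b"V2:" + 12-byte nonce + AES-GCM ciphertext`, and the base64 key is reused directly as an AES-256-GCM key, while headerless blobs fall back to Fernet.

    import base64, os
    from cryptography.fernet import Fernet
    from cryptography.hazmat.primitives.ciphers.aead import AESGCM

    key_b64 = Fernet.generate_key()                  # urlsafe base64 of 32 random bytes
    aes = AESGCM(base64.urlsafe_b64decode(key_b64))  # same bytes reused as AES-256-GCM key

    nonce = os.urandom(12)
    blob = b"V2:" + nonce + aes.encrypt(nonce, b"hello", None)

    # Anything carrying the V2 header decrypts via AES-GCM...
    assert aes.decrypt(blob[3:15], blob[15:], None) == b"hello"

    # ...while a legacy blob (no header) still decrypts through the Fernet fallback.
    legacy = Fernet(key_b64).encrypt(b"hello")
    assert not legacy.startswith(b"V2:")
    assert Fernet(key_b64).decrypt(legacy) == b"hello"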
@@ -484,13 +220,34 @@ class EncryptionManager:
             )
             raise
 
-    def validate_seed(self, seed_phrase: str) -> bool:
-        """
-        Validates the seed phrase format using BIP-39 standards.
-
-        :param seed_phrase: The BIP39 seed phrase to validate.
-        :return: True if valid, False otherwise.
-        """
+    def update_checksum(self, relative_path: Optional[Path] = None) -> None:
+        """Updates the checksum file for the specified file."""
+        if relative_path is None:
+            relative_path = Path("seedpass_entries_db.json.enc")
+
+        file_path = self.fingerprint_dir / relative_path
+        if not file_path.exists():
+            return
+
+        try:
+            with exclusive_lock(file_path) as fh:
+                fh.seek(0)
+                encrypted_bytes = fh.read()
+            checksum = hashlib.sha256(encrypted_bytes).hexdigest()
+            checksum_file = file_path.parent / f"{file_path.stem}_checksum.txt"
+            with exclusive_lock(checksum_file) as fh:
+                fh.seek(0)
+                fh.truncate()
+                fh.write(checksum.encode("utf-8"))
+            os.chmod(checksum_file, 0o600)
+        except Exception as e:
+            logger.error(
+                f"Failed to update checksum for '{relative_path}': {e}", exc_info=True
+            )
+            raise
+
+    # ... validate_seed and derive_seed_from_mnemonic can remain the same ...
+    def validate_seed(self, seed_phrase: str) -> bool:
         try:
             words = seed_phrase.split()
             if len(words) != 12:
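
Note on the checksum convention in `update_checksum` above: the checksum is the SHA-256 hex digest of the encrypted file bytes, stored beside the vault as '<stem>_checksum.txt'. A small sketch with hypothetical helper names:

    import hashlib
    from pathlib import Path

    def checksum_path(file_path: Path) -> Path:
        # e.g. seedpass_entries_db.json.enc -> seedpass_entries_db.json_checksum.txt
        return file_path.parent / f"{file_path.stem}_checksum.txt"

    def expected_checksum(encrypted_bytes: bytes) -> str:
        return hashlib.sha256(encrypted_bytes).hexdigest()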
@@ -499,7 +256,6 @@ class EncryptionManager:
                     colored("Error: Seed phrase must contain exactly 12 words.", "red")
                 )
                 return False
-            # Additional validation can be added here (e.g., word list checks)
             logger.debug("Seed phrase validated successfully.")
             return True
         except Exception as e:
@@ -508,13 +264,6 @@ class EncryptionManager:
             return False
 
     def derive_seed_from_mnemonic(self, mnemonic: str, passphrase: str = "") -> bytes:
-        """
-        Derives a cryptographic seed from a BIP39 mnemonic (seed phrase).
-
-        :param mnemonic: The BIP39 mnemonic phrase.
-        :param passphrase: An optional passphrase for additional security.
-        :return: The derived seed as bytes.
-        """
         try:
             if not isinstance(mnemonic, str):
                 if isinstance(mnemonic, list):
@@ -40,4 +40,3 @@ def test_legacy_index_migrates(tmp_path: Path):
     assert new_file.exists()
     assert not legacy_file.exists()
     assert not (tmp_path / "seedpass_passwords_db_checksum.txt").exists()
-    assert (tmp_path / ("seedpass_entries_db.json.enc.fernet")).exists()
@@ -51,7 +51,7 @@ def test_round_trip(monkeypatch):
     assert vault.load_index()["pw"] == data["pw"]
 
 
-from cryptography.exceptions import InvalidTag
+from cryptography.fernet import InvalidToken
 
 
 def test_corruption_detection(monkeypatch):
@@ -68,7 +68,7 @@ def test_corruption_detection(monkeypatch):
     content["payload"] = base64.b64encode(payload).decode()
     path.write_text(json.dumps(content))
 
-    with pytest.raises(InvalidTag):
+    with pytest.raises(InvalidToken):
         import_backup(vault, backup, path, parent_seed=SEED)
 
@@ -23,9 +23,7 @@ def test_parent_seed_migrates_from_fernet(tmp_path: Path) -> None:
     assert decrypted == TEST_SEED
 
     new_file = tmp_path / "parent_seed.enc"
-    legacy_backup = tmp_path / "parent_seed.enc.fernet"
 
     assert new_file.exists()
-    assert legacy_backup.exists()
     assert new_file.read_bytes() != encrypted
-    assert legacy_backup.read_bytes() == encrypted
+    assert new_file.read_bytes().startswith(b"V2:")
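
Note on the exception change in these tests: `decrypt_data()` now re-raises AES-GCM's `InvalidTag` as `cryptography.fernet.InvalidToken`, so callers and tests see a single "could not decrypt" exception type. A hedged pytest-style sketch, with `mgr` standing in for an initialized EncryptionManager and the helper name being illustrative:

    import pytest
    from cryptography.fernet import InvalidToken

    def check_tamper_detection(mgr) -> None:
        blob = mgr.encrypt_data(b"secret")               # V2 header + nonce + ciphertext
        tampered = blob[:-1] + bytes([blob[-1] ^ 0x01])  # flip one bit in the GCM tag
        with pytest.raises(InvalidToken):
            mgr.decrypt_data(tampered)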