Mirror of https://github.com/PR0M3TH3AN/SeedPass.git
Add event_id tracking for Nostr chunks

@@ -14,6 +14,7 @@ class ChunkMeta:
     id: str
     size: int
     hash: str
+    event_id: Optional[str] = None
 
 
 @dataclass
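
For context, a minimal sketch of how the ChunkMeta dataclass likely reads after this hunk; the fields are taken from the diff, while the imports and comments are assumptions added for illustration:

from dataclasses import dataclass
from typing import Optional


@dataclass
class ChunkMeta:
    id: str                         # stable chunk identifier, e.g. "seedpass-chunk-0003"
    size: int                       # chunk length in bytes
    hash: str                       # SHA-256 hex digest of the chunk payload
    event_id: Optional[str] = None  # hex id of the Nostr event carrying this chunk, once published
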
@@ -78,6 +78,7 @@ def prepare_snapshot(
             id=f"seedpass-chunk-{i:04d}",
             size=len(chunk),
             hash=hashlib.sha256(chunk).hexdigest(),
+            event_id=None,
         )
     )
 
@@ -372,7 +373,13 @@ class NostrClient:
             [Tag.identifier(meta.id)]
         )
         event = builder.build(self.keys.public_key()).sign_with_keys(self.keys)
-        await self.client.send_event(event)
+        result = await self.client.send_event(event)
+        try:
+            meta.event_id = (
+                result.id.to_hex() if hasattr(result, "id") else str(result)
+            )
+        except Exception:
+            meta.event_id = None
 
         manifest_json = json.dumps(
             {
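
The hunk above records the relay's response on the chunk metadata. A minimal sketch of that extraction as a standalone helper, assuming (as in the nostr-sdk bindings) that send_event returns an output object whose id has a to_hex() method; the helper name and factoring are illustrative only, the diff inlines this logic:

def extract_event_id(result) -> str | None:
    # Best-effort: a real SendEventOutput exposes .id (an EventId), while the
    # test doubles in this commit may hand back a simpler object or a string.
    try:
        return result.id.to_hex() if hasattr(result, "id") else str(result)
    except Exception:
        return None
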
@@ -428,13 +435,12 @@ class NostrClient:
 
         chunks: list[bytes] = []
         for meta in manifest.chunks:
-            cf = (
-                Filter()
-                .author(pubkey)
-                .kind(Kind(KIND_SNAPSHOT_CHUNK))
-                .identifier(meta.id)
-                .limit(1)
-            )
+            cf = Filter().author(pubkey).kind(Kind(KIND_SNAPSHOT_CHUNK))
+            if meta.event_id:
+                cf = cf.id(EventId.parse(meta.event_id))
+            else:
+                cf = cf.identifier(meta.id)
+            cf = cf.limit(1)
             cev = (await self.client.fetch_events(cf, timeout)).to_vec()
             if not cev:
                 raise ValueError(f"Missing chunk {meta.id}")
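
The download path now prefers an exact lookup by the recorded event id and only falls back to the addressable "d" identifier for manifests written before event ids were tracked. A sketch of the same decision as a standalone helper, assuming the Filter, Kind and EventId types come from the nostr-sdk bindings used elsewhere in the client; the function name and factoring are illustrative, not part of the diff:

from nostr_sdk import EventId, Filter, Kind

from nostr.backup_models import KIND_SNAPSHOT_CHUNK


def chunk_filter(pubkey, meta) -> Filter:
    f = Filter().author(pubkey).kind(Kind(KIND_SNAPSHOT_CHUNK))
    if meta.event_id:
        # Exact lookup: EventId.parse() accepts the 64-character hex id.
        f = f.id(EventId.parse(meta.event_id))
    else:
        # Legacy manifests carry no event id, so match on the "d" tag instead.
        f = f.identifier(meta.id)
    return f.limit(1)
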
@@ -108,6 +108,7 @@ class DummyFilter:
         self.ids: list[str] = []
         self.limit_val: int | None = None
         self.since_val: int | None = None
+        self.id_called: bool = False
 
     def author(self, _pk):
         return self
@@ -125,6 +126,11 @@ class DummyFilter:
         self.ids.append(ident)
         return self
 
+    def id(self, ident: str):
+        self.id_called = True
+        self.ids.append(ident)
+        return self
+
     def limit(self, val: int):
         self.limit_val = val
         return self
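
DummyFilter gains an id() method so the tests can tell whether the client went through the exact-id path; it flags the call and reuses the ids list that fetch_events already consumes. A trimmed, self-contained sketch of that bookkeeping (class name is illustrative):

class _FilterSpy:
    # Reduced stand-in for DummyFilter, kept here only to show the recording behaviour.
    def __init__(self):
        self.ids: list[str] = []
        self.id_called: bool = False

    def id(self, ident: str):
        self.id_called = True
        self.ids.append(ident)
        return self


spy = _FilterSpy().id("ab" * 32)
assert spy.id_called and spy.ids == ["ab" * 32]
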
@@ -167,6 +173,7 @@ class DummyRelayClient:
         self.manifests: list[DummyEvent] = []
         self.chunks: dict[str, DummyEvent] = {}
         self.deltas: list[DummyEvent] = []
+        self.filters: list[DummyFilter] = []
 
     async def add_relays(self, _relays):
         pass
@@ -195,6 +202,7 @@ class DummyRelayClient:
         elif event.kind == KIND_SNAPSHOT_CHUNK:
             ident = event.tags[0] if event.tags else str(self.counter)
             self.chunks[ident] = event
+            self.chunks[eid] = event
         elif event.kind == KIND_DELTA:
             if not hasattr(event, "created_at"):
                 self.ts_counter += 1
@@ -203,6 +211,7 @@ class DummyRelayClient:
         return DummySendResult(eid)
 
     async def fetch_events(self, f, _timeout):
+        self.filters.append(f)
         kind = getattr(f, "kind_val", None)
         limit = getattr(f, "limit_val", None)
         identifier = f.ids[0] if getattr(f, "ids", None) else None
@@ -39,7 +39,7 @@ class MockClient:
 
         class FakeId:
             def to_hex(self_inner):
-                return "abcd"
+                return "a" * 64
 
         class FakeOutput:
             def __init__(self):
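
FakeId.to_hex() now returns a 64-character hex string rather than "abcd", presumably so the recorded value survives EventId.parse() on the read path. A quick sanity check of the shape being relied on:

fake_hex = "a" * 64
assert len(fake_hex) == 64                 # same length as a hex-encoded event id
assert len(bytes.fromhex(fake_hex)) == 32  # decodes to the expected 32 bytes
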
@@ -7,6 +7,7 @@ from password_manager.entry_management import EntryManager
 from password_manager.backup import BackupManager
 from password_manager.config_manager import ConfigManager
 from nostr.client import prepare_snapshot
+from nostr.backup_models import KIND_SNAPSHOT_CHUNK
 
 
 def test_manifest_generation(tmp_path):
@@ -35,10 +36,18 @@ def test_retrieve_multi_chunk_snapshot(dummy_nostr_client):
     data = os.urandom(120000)
     manifest, _ = asyncio.run(client.publish_snapshot(data, limit=50000))
     assert len(manifest.chunks) > 1
+    for meta in manifest.chunks:
+        assert meta.event_id
     fetched_manifest, chunk_bytes = asyncio.run(client.fetch_latest_snapshot())
     assert len(chunk_bytes) == len(manifest.chunks)
+    assert [c.event_id for c in fetched_manifest.chunks] == [
+        c.event_id for c in manifest.chunks
+    ]
     joined = b"".join(chunk_bytes)
     assert gzip.decompress(joined) == data
+    for f in relay.filters:
+        if getattr(f, "kind_val", None) == KIND_SNAPSHOT_CHUNK:
+            assert f.id_called
 
 
 def test_publish_and_fetch_deltas(dummy_nostr_client):
@@ -68,6 +68,8 @@ class DummyClient:
 def test_fetch_latest_snapshot():
     data = b"seedpass" * 1000
     manifest, chunks = prepare_snapshot(data, 50000)
+    for i, m in enumerate(manifest.chunks):
+        m.event_id = f"{i:064x}"
     manifest_json = json.dumps(
         {
             "ver": manifest.ver,
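
The unit test stubs deterministic, well-formed event ids onto the prepared chunks so the parsing path has valid input. For reference, the formatting it relies on zero-pads the chunk index to 64 hex characters:

assert f"{0:064x}" == "0" * 64
assert f"{255:064x}".endswith("ff") and len(f"{255:064x}") == 64
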
@@ -98,3 +100,6 @@ def test_fetch_latest_snapshot():
 
     assert manifest == result_manifest
     assert result_chunks == chunks
+    assert [c.event_id for c in manifest.chunks] == [
+        c.event_id for c in result_manifest.chunks
+    ]