This commit is contained in:
thePR0M3TH3AN
2025-05-16 21:14:32 -04:00
parent 37e75a1162
commit 45d4f57733
15 changed files with 968 additions and 554 deletions

25
Cargo.lock generated
View File

@@ -412,6 +412,12 @@ version = "1.70.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"

+[[package]]
+name = "itoa"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
+
 [[package]]
 name = "js-sys"
 version = "0.3.77"
@@ -481,6 +487,7 @@ dependencies = [
"glob", "glob",
"predicates", "predicates",
"rusqlite", "rusqlite",
"serde_json",
"shellexpand", "shellexpand",
"shlex", "shlex",
"tempfile", "tempfile",
@@ -712,6 +719,12 @@ version = "1.0.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2"

+[[package]]
+name = "ryu"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
+
 [[package]]
 name = "same-file"
 version = "1.0.6"
@@ -741,6 +754,18 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "serde_json"
version = "1.0.140"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
]
[[package]] [[package]]
name = "sharded-slab" name = "sharded-slab"
version = "0.1.7" version = "0.1.7"

View File

@@ -16,9 +16,16 @@ shlex = "1.3"
chrono = "0.4" chrono = "0.4"
shellexpand = "3.1" shellexpand = "3.1"
clap_complete = "4.1" clap_complete = "4.1"
serde_json = { version = "1", optional = true } # <-- NEW
[dev-dependencies] [dev-dependencies]
assert_cmd = "2" assert_cmd = "2"
predicates = "3" predicates = "3"
tempfile = "3" tempfile = "3"
dirs = "5" # cross-platform data dir helper dirs = "5" # cross-platform data dir helper
[features]
# The CLI prints JSON only when this feature is enabled.
# Having the feature listed silences the `unexpected cfg` lint even
# when you dont turn it on.
json = ["serde_json"]

View File

@@ -1,26 +1,108 @@
-// src/cli/coll.rs
-
-use clap::{Subcommand, Args};
-use rusqlite::Connection;
-
-use crate::cli::Format;
-
-#[derive(Subcommand, Debug)]
-pub enum CollCmd {
-    Create(CreateArgs),
-    Add(AddArgs),
-    List(ListArgs),
-}
-
-#[derive(Args, Debug)]
-pub struct CreateArgs { pub name: String }
-#[derive(Args, Debug)]
-pub struct AddArgs { pub name: String, pub file_pattern: String }
-#[derive(Args, Debug)]
-pub struct ListArgs { pub name: String }
-
-pub fn run(cmd: &CollCmd, _conn: &mut Connection, _format: Format) -> anyhow::Result<()> {
-    match cmd {
-        CollCmd::Create(a) => todo!("coll create {:?}", a),
-        CollCmd::Add(a)    => todo!("coll add {:?}", a),
-        CollCmd::List(a)   => todo!("coll list {:?}", a),
-    }
-}
+//! `marlin coll …` – named collections of files (simple "playlists").
+
+use clap::{Args, Subcommand};
+use rusqlite::Connection;
+
+use crate::{
+    cli::Format,
+    db,
+};
+
+#[derive(Subcommand, Debug)]
+pub enum CollCmd {
+    /// Create an empty collection
+    Create(CreateArgs),
+    /// Add files (glob) to a collection
+    Add(AddArgs),
+    /// List files inside a collection
+    List(ListArgs),
+}
+
+#[derive(Args, Debug)]
+pub struct CreateArgs {
+    pub name: String,
+}
+
+#[derive(Args, Debug)]
+pub struct AddArgs {
+    pub name: String,
+    pub file_pattern: String,
+}
+
+#[derive(Args, Debug)]
+pub struct ListArgs {
+    pub name: String,
+}
+
+/// Look-up an existing collection **without** implicitly creating it.
+///
+/// Returns the collection ID or an error if it doesn't exist.
+fn lookup_collection_id(conn: &Connection, name: &str) -> anyhow::Result<i64> {
+    conn.query_row(
+        "SELECT id FROM collections WHERE name = ?1",
+        [name],
+        |r| r.get(0),
+    )
+    .map_err(|_| anyhow::anyhow!("collection not found: {}", name))
+}
+
+pub fn run(cmd: &CollCmd, conn: &mut Connection, fmt: Format) -> anyhow::Result<()> {
+    match cmd {
+        /* ── coll create ──────────────────────────────────────────── */
+        CollCmd::Create(a) => {
+            db::ensure_collection(conn, &a.name)?;
+            if matches!(fmt, Format::Text) {
+                println!("Created collection '{}'", a.name);
+            }
+        }
+
+        /* ── coll add ─────────────────────────────────────────────── */
+        CollCmd::Add(a) => {
+            // Fail if the target collection does not yet exist
+            let coll_id = lookup_collection_id(conn, &a.name)?;
+
+            let like = a.file_pattern.replace('*', "%");
+            let mut stmt = conn.prepare("SELECT id FROM files WHERE path LIKE ?1")?;
+            let ids: Vec<i64> = stmt
+                .query_map([&like], |r| r.get::<_, i64>(0))?
+                .collect::<Result<_, _>>()?;
+
+            for fid in &ids {
+                db::add_file_to_collection(conn, coll_id, *fid)?;
+            }
+
+            match fmt {
+                Format::Text => println!("Added {} file(s) → '{}'", ids.len(), a.name),
+                Format::Json => {
+                    #[cfg(feature = "json")]
+                    {
+                        println!(
+                            "{{\"collection\":\"{}\",\"added\":{}}}",
+                            a.name,
+                            ids.len()
+                        );
+                    }
+                }
+            }
+        }
+
+        /* ── coll list ────────────────────────────────────────────── */
+        CollCmd::List(a) => {
+            let files = db::list_collection(conn, &a.name)?;
+            match fmt {
+                Format::Text => {
+                    for f in files {
+                        println!("{f}");
+                    }
+                }
+                Format::Json => {
+                    #[cfg(feature = "json")]
+                    {
+                        println!("{}", serde_json::to_string(&files)?);
+                    }
+                }
+            }
+        }
+    }
+    Ok(())
+}
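One behavioural note on `coll add`, sketched below: the `'*' → '%'` rewrite maps the glob onto a SQL LIKE pattern, and LIKE's `%` also crosses `/`, unlike a shell glob (example paths are assumptions):

    // "Projects/*.md" becomes the LIKE pattern "Projects/%.md".
    let like = "Projects/*.md".replace('*', "%");
    assert_eq!(like, "Projects/%.md");
    // In SQLite, 'Projects/Alpha/notes.md' LIKE 'Projects/%.md' is true,
    // so `coll add` matches deeper paths than the equivalent glob would.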

View File

@@ -1,24 +1,168 @@
-// src/cli/view.rs
-
-use clap::{Subcommand, Args};
-use rusqlite::Connection;
-
-use crate::cli::Format;
-
-#[derive(Subcommand, Debug)]
-pub enum ViewCmd {
-    Save(ArgsSave),
-    List,
-    Exec(ArgsExec),
-}
-
-#[derive(Args, Debug)]
-pub struct ArgsSave { pub view_name: String, pub query: String }
-#[derive(Args, Debug)]
-pub struct ArgsExec { pub view_name: String }
-
-pub fn run(cmd: &ViewCmd, _conn: &mut Connection, _format: Format) -> anyhow::Result<()> {
-    match cmd {
-        ViewCmd::Save(a) => todo!("view save {:?}", a),
-        ViewCmd::List    => todo!("view list"),
-        ViewCmd::Exec(a) => todo!("view exec {:?}", a),
-    }
-}
+//! `marlin view …` – save & use "smart folders" (named queries).
+
+use std::fs;
+
+use anyhow::Result;
+use clap::{Args, Subcommand};
+use rusqlite::Connection;
+
+use crate::{cli::Format, db};
+
+#[derive(Subcommand, Debug)]
+pub enum ViewCmd {
+    /// Save (or update) a view
+    Save(ArgsSave),
+    /// List all saved views
+    List,
+    /// Execute a view (print matching paths)
+    Exec(ArgsExec),
+}
+
+#[derive(Args, Debug)]
+pub struct ArgsSave {
+    pub view_name: String,
+    pub query: String,
+}
+
+#[derive(Args, Debug)]
+pub struct ArgsExec {
+    pub view_name: String,
+}
+
+pub fn run(cmd: &ViewCmd, conn: &mut Connection, fmt: Format) -> anyhow::Result<()> {
+    match cmd {
+        /* ── view save ───────────────────────────────────────────── */
+        ViewCmd::Save(a) => {
+            db::save_view(conn, &a.view_name, &a.query)?;
+            if matches!(fmt, Format::Text) {
+                println!("Saved view '{}' = {}", a.view_name, a.query);
+            }
+        }
+
+        /* ── view list ───────────────────────────────────────────── */
+        ViewCmd::List => {
+            let views = db::list_views(conn)?;
+            match fmt {
+                Format::Text => {
+                    for (name, q) in views {
+                        println!("{name}: {q}");
+                    }
+                }
+                Format::Json => {
+                    #[cfg(feature = "json")]
+                    {
+                        println!("{}", serde_json::to_string(&views)?);
+                    }
+                }
+            }
+        }
+
+        /* ── view exec ───────────────────────────────────────────── */
+        ViewCmd::Exec(a) => {
+            let raw = db::view_query(conn, &a.view_name)?;
+
+            // Re-use the tiny parser from `marlin search`
+            let fts_expr = build_fts_match(&raw);
+
+            let mut stmt = conn.prepare(
+                r#"
+                SELECT f.path
+                  FROM files_fts
+                  JOIN files f ON f.rowid = files_fts.rowid
+                 WHERE files_fts MATCH ?1
+                 ORDER BY rank
+                "#,
+            )?;
+
+            let mut paths: Vec<String> = stmt
+                .query_map([fts_expr], |r| r.get::<_, String>(0))?
+                .collect::<Result<_, _>>()?;
+
+            /* ── NEW: graceful fallback when FTS finds nothing ───── */
+            if paths.is_empty() && !raw.contains(':') {
+                paths = naive_search(conn, &raw)?;
+            }
+
+            if paths.is_empty() && matches!(fmt, Format::Text) {
+                eprintln!("(view '{}' has no matches)", a.view_name);
+            } else {
+                for p in paths {
+                    println!("{p}");
+                }
+            }
+        }
+    }
+    Ok(())
+}
+
+/* ─── naive substring path/content search (≤ 64 kB files) ───────── */
+
+fn naive_search(conn: &Connection, term: &str) -> Result<Vec<String>> {
+    let term_lc = term.to_lowercase();
+    let mut stmt = conn.prepare("SELECT path FROM files")?;
+    let rows = stmt.query_map([], |r| r.get::<_, String>(0))?;
+
+    let mut hits = Vec::new();
+    for p in rows {
+        let p = p?;
+
+        /* path match */
+        if p.to_lowercase().contains(&term_lc) {
+            hits.push(p);
+            continue;
+        }
+
+        /* small-file content match */
+        if let Ok(meta) = fs::metadata(&p) {
+            if meta.len() > 64_000 {
+                continue;
+            }
+        }
+        if let Ok(content) = fs::read_to_string(&p) {
+            if content.to_lowercase().contains(&term_lc) {
+                hits.push(p);
+            }
+        }
+    }
+    Ok(hits)
+}
+
+/* ─── minimal copy of search-string → FTS5 translator ───────────── */
+
+fn build_fts_match(raw_query: &str) -> String {
+    use shlex;
+
+    let mut parts = Vec::new();
+    let toks = shlex::split(raw_query).unwrap_or_else(|| vec![raw_query.to_string()]);
+
+    for tok in toks {
+        if ["AND", "OR", "NOT"].contains(&tok.as_str()) {
+            parts.push(tok);
+        } else if let Some(tag) = tok.strip_prefix("tag:") {
+            for (i, seg) in tag.split('/').filter(|s| !s.is_empty()).enumerate() {
+                if i > 0 {
+                    parts.push("AND".into());
+                }
+                parts.push(format!("tags_text:{}", escape(seg)));
+            }
+        } else if let Some(attr) = tok.strip_prefix("attr:") {
+            let mut kv = attr.splitn(2, '=');
+            let key = kv.next().unwrap();
+            if let Some(val) = kv.next() {
+                parts.push(format!("attrs_text:{}", escape(key)));
+                parts.push("AND".into());
+                parts.push(format!("attrs_text:{}", escape(val)));
+            } else {
+                parts.push(format!("attrs_text:{}", escape(key)));
+            }
+        } else {
+            parts.push(escape(&tok));
+        }
+    }
+    parts.join(" ")
+}
+
+fn escape(term: &str) -> String {
+    if term.contains(|c: char| c.is_whitespace() || "-:()\"".contains(c))
+        || ["AND", "OR", "NOT", "NEAR"].contains(&term.to_uppercase().as_str())
+    {
+        format!("\"{}\"", term.replace('"', "\"\""))
+    } else {
+        term.to_string()
+    }
+}
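A few worked examples of what `build_fts_match` produces, derived from the code above (illustrative assertions, not part of the commit):

    assert_eq!(build_fts_match("tag:foo/bar"),
               "tags_text:foo AND tags_text:bar");
    assert_eq!(build_fts_match("attr:reviewed=yes"),
               "attrs_text:reviewed AND attrs_text:yes");
    assert_eq!(build_fts_match("hello world"), "hello world"); // plain terms pass through
    assert_eq!(escape("foo-bar"), "\"foo-bar\"");              // punctuation forces quoting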

View File

@@ -1,7 +1,10 @@
-use std::path::{Path, PathBuf};
-
 use anyhow::Result;
 use directories::ProjectDirs;
+use std::{
+    collections::hash_map::DefaultHasher,
+    hash::{Hash, Hasher},
+    path::{Path, PathBuf},
+};

 /// Runtime configuration (currently just the DB path).
 #[derive(Debug, Clone)]
@@ -10,22 +13,39 @@ pub struct Config {
 }

 impl Config {
-    /// Resolve configuration from environment or XDG directories.
+    /// Resolve configuration from environment or derive one per-workspace.
+    ///
+    /// Priority:
+    ///   1. `MARLIN_DB_PATH` env-var (explicit override)
+    ///   2. *Workspace-local* file under the XDG data dir
+    ///      (`~/.local/share/marlin/index_<hash>.db`)
+    ///   3. Fallback to `./index.db` when we cannot locate an XDG dir
     pub fn load() -> Result<Self> {
-        let db_path = std::env::var_os("MARLIN_DB_PATH")
-            .map(PathBuf::from)
-            .or_else(|| {
-                ProjectDirs::from("io", "Marlin", "marlin")
-                    .map(|dirs| dirs.data_dir().join("index.db"))
-            })
-            .unwrap_or_else(|| Path::new("index.db").to_path_buf());
-
-        std::fs::create_dir_all(
-            db_path
-                .parent()
-                .expect("db_path should always have a parent directory"),
-        )?;
-
-        Ok(Self { db_path })
+        // 1) explicit override
+        if let Some(val) = std::env::var_os("MARLIN_DB_PATH") {
+            let p = PathBuf::from(val);
+            std::fs::create_dir_all(p.parent().expect("has parent"))?;
+            return Ok(Self { db_path: p });
+        }
+
+        // 2) derive per-workspace DB name from CWD hash
+        let cwd = std::env::current_dir()?;
+        let mut h = DefaultHasher::new();
+        cwd.hash(&mut h);
+        let digest = h.finish(); // 64-bit
+        let file_name = format!("index_{digest:016x}.db");
+
+        if let Some(dirs) = ProjectDirs::from("io", "Marlin", "marlin") {
+            let dir = dirs.data_dir();
+            std::fs::create_dir_all(dir)?;
+            return Ok(Self {
+                db_path: dir.join(file_name),
+            });
+        }
+
+        // 3) very last resort – workspace-relative DB
+        Ok(Self {
+            db_path: Path::new(&file_name).to_path_buf(),
+        })
     }
 }
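The per-workspace name derivation in step 2 boils down to the following standalone sketch (`workspace_db_name` is a hypothetical helper; `DefaultHasher::new()` uses fixed keys, so the digest is stable across runs of the same binary):

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    use std::path::Path;

    // Hash the workspace path and format the 64-bit digest as 16 hex chars.
    fn workspace_db_name(cwd: &Path) -> String {
        let mut h = DefaultHasher::new();
        cwd.hash(&mut h);
        format!("index_{:016x}.db", h.finish())
    }

Two different workspaces therefore get two different index files, so `marlin init` in one project can no longer clobber another project's index.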

View File

@@ -1,260 +0,0 @@
use std::{
fs,
path::{Path, PathBuf},
};
use anyhow::{Context, Result};
use chrono::Local;
use rusqlite::{
backup::{Backup, StepResult},
params,
Connection,
OpenFlags,
OptionalExtension,
};
use tracing::{debug, info};
/// Embed every numbered migration file here.
const MIGRATIONS: &[(&str, &str)] = &[
("0001_initial_schema.sql", include_str!("migrations/0001_initial_schema.sql")),
("0002_update_fts_and_triggers.sql", include_str!("migrations/0002_update_fts_and_triggers.sql")),
("0003_create_links_collections_views.sql", include_str!("migrations/0003_create_links_collections_views.sql")),
("0004_fix_hierarchical_tags_fts.sql", include_str!("migrations/0004_fix_hierarchical_tags_fts.sql")),
];
/* ─── connection bootstrap ──────────────────────────────────────────── */
pub fn open<P: AsRef<Path>>(db_path: P) -> Result<Connection> {
let db_path_ref = db_path.as_ref();
let mut conn = Connection::open(db_path_ref)
.with_context(|| format!("failed to open DB at {}", db_path_ref.display()))?;
conn.pragma_update(None, "journal_mode", "WAL")?;
conn.pragma_update(None, "foreign_keys", "ON")?;
// Apply migrations (drops & recreates all FTS triggers)
apply_migrations(&mut conn)?;
Ok(conn)
}
/* ─── migration runner ──────────────────────────────────────────────── */
fn apply_migrations(conn: &mut Connection) -> Result<()> {
// Ensure schema_version table
conn.execute_batch(
"CREATE TABLE IF NOT EXISTS schema_version (
version INTEGER PRIMARY KEY,
applied_on TEXT NOT NULL
);",
)?;
// Legacy patch (ignore if exists)
let _ = conn.execute("ALTER TABLE schema_version ADD COLUMN applied_on TEXT", []);
let tx = conn.transaction()?;
for (fname, sql) in MIGRATIONS {
let version: i64 = fname
.split('_')
.next()
.and_then(|s| s.parse().ok())
.expect("migration filenames start with number");
let already: Option<i64> = tx
.query_row(
"SELECT version FROM schema_version WHERE version = ?1",
[version],
|r| r.get(0),
)
.optional()?;
if already.is_some() {
debug!("migration {} already applied", fname);
continue;
}
info!("applying migration {}", fname);
println!(
"\nSQL SCRIPT FOR MIGRATION: {}\nBEGIN SQL >>>\n{}\n<<< END SQL\n",
fname, sql
);
tx.execute_batch(sql)
.with_context(|| format!("could not apply migration {}", fname))?;
tx.execute(
"INSERT INTO schema_version (version, applied_on) VALUES (?1, ?2)",
params![version, Local::now().to_rfc3339()],
)?;
}
tx.commit()?;
Ok(())
}
/* ─── helpers ───────────────────────────────────────────────────────── */
pub fn ensure_tag_path(conn: &Connection, path: &str) -> Result<i64> {
let mut parent: Option<i64> = None;
for segment in path.split('/').filter(|s| !s.is_empty()) {
conn.execute(
"INSERT OR IGNORE INTO tags(name, parent_id) VALUES (?1, ?2)",
params![segment, parent],
)?;
let id: i64 = conn.query_row(
"SELECT id FROM tags WHERE name = ?1 AND (parent_id IS ?2 OR parent_id = ?2)",
params![segment, parent],
|row| row.get(0),
)?;
parent = Some(id);
}
parent.ok_or_else(|| anyhow::anyhow!("empty tag path"))
}
pub fn file_id(conn: &Connection, path: &str) -> Result<i64> {
conn.query_row("SELECT id FROM files WHERE path = ?1", [path], |r| r.get(0))
.map_err(|_| anyhow::anyhow!("file not indexed: {}", path))
}
pub fn upsert_attr(conn: &Connection, file_id: i64, key: &str, value: &str) -> Result<()> {
conn.execute(
r#"
INSERT INTO attributes(file_id, key, value)
VALUES (?1, ?2, ?3)
ON CONFLICT(file_id, key) DO UPDATE SET value = excluded.value
"#,
params![file_id, key, value],
)?;
Ok(())
}
/// Add a typed link from one file to another.
pub fn add_link(conn: &Connection, src_file_id: i64, dst_file_id: i64, link_type: Option<&str>) -> Result<()> {
conn.execute(
"INSERT INTO links(src_file_id, dst_file_id, type)
VALUES (?1, ?2, ?3)
ON CONFLICT(src_file_id, dst_file_id, type) DO NOTHING",
params![src_file_id, dst_file_id, link_type],
)?;
Ok(())
}
/// Remove a typed link between two files.
pub fn remove_link(conn: &Connection, src_file_id: i64, dst_file_id: i64, link_type: Option<&str>) -> Result<()> {
conn.execute(
"DELETE FROM links
WHERE src_file_id = ?1
AND dst_file_id = ?2
AND (type IS ?3 OR type = ?3)",
params![src_file_id, dst_file_id, link_type],
)?;
Ok(())
}
/// List all links for files matching a glob-style pattern.
/// `direction` may be `"in"` (incoming), `"out"` (outgoing), or `None` (outgoing).
pub fn list_links(
conn: &Connection,
pattern: &str,
direction: Option<&str>,
link_type: Option<&str>,
) -> Result<Vec<(String, String, Option<String>)>> {
// Convert glob '*' → SQL LIKE '%'
let like_pattern = pattern.replace('*', "%");
// Find matching files
let mut stmt = conn.prepare("SELECT id, path FROM files WHERE path LIKE ?1")?;
let mut rows = stmt.query(params![like_pattern])?;
let mut files = Vec::new();
while let Some(row) = rows.next()? {
let id: i64 = row.get(0)?;
let path: String = row.get(1)?;
files.push((id, path));
}
let mut results = Vec::new();
for (file_id, file_path) in files {
let (src_col, dst_col) = match direction {
Some("in") => ("dst_file_id", "src_file_id"),
_ => ("src_file_id", "dst_file_id"),
};
let sql = format!(
"SELECT f2.path, l.type
FROM links l
JOIN files f2 ON f2.id = l.{dst}
WHERE l.{src} = ?1
AND (?2 IS NULL OR l.type = ?2)",
src = src_col,
dst = dst_col,
);
let mut stmt2 = conn.prepare(&sql)?;
let mut rows2 = stmt2.query(params![file_id, link_type])?;
while let Some(r2) = rows2.next()? {
let other: String = r2.get(0)?;
let typ: Option<String> = r2.get(1)?;
results.push((file_path.clone(), other, typ));
}
}
Ok(results)
}
/// Find all incoming links (backlinks) to files matching a pattern.
pub fn find_backlinks(conn: &Connection, pattern: &str) -> Result<Vec<(String, Option<String>)>> {
let like_pattern = pattern.replace('*', "%");
let mut stmt = conn.prepare(
"SELECT f1.path, l.type
FROM links l
JOIN files f1 ON f1.id = l.src_file_id
JOIN files f2 ON f2.id = l.dst_file_id
WHERE f2.path LIKE ?1",
)?;
let mut rows = stmt.query(params![like_pattern])?;
let mut result = Vec::new();
while let Some(row) = rows.next()? {
let src_path: String = row.get(0)?;
let typ: Option<String> = row.get(1)?;
result.push((src_path, typ));
}
Ok(result)
}
/* ─── backup / restore ──────────────────────────────────────────────── */
pub fn backup<P: AsRef<Path>>(db_path: P) -> Result<PathBuf> {
let src = db_path.as_ref();
let dir = src
.parent()
.ok_or_else(|| anyhow::anyhow!("invalid DB path: {}", src.display()))?
.join("backups");
fs::create_dir_all(&dir)?;
let stamp = Local::now().format("%Y-%m-%d_%H-%M-%S");
let dst = dir.join(format!("backup_{stamp}.db"));
let src_conn = Connection::open_with_flags(src, OpenFlags::SQLITE_OPEN_READ_ONLY)?;
let mut dst_conn = Connection::open(&dst)?;
let bk = Backup::new(&src_conn, &mut dst_conn)?;
while let StepResult::More = bk.step(100)? {}
Ok(dst)
}
pub fn restore<P: AsRef<Path>>(backup_path: P, live_db_path: P) -> Result<()> {
fs::copy(&backup_path, &live_db_path)?;
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn migrations_apply_in_memory() {
// Opening an in-memory database should apply every migration without error.
let _conn = open(":memory:").expect("in-memory migrations should run cleanly");
}
}

View File

@@ -1,8 +1,12 @@
+//! Central DB helper – connection bootstrap, migrations **and** most
+//! data-access helpers (tags, links, collections, saved views, …).
+
 use std::{
     fs,
     path::{Path, PathBuf},
 };
+use std::result::Result as StdResult;

 use anyhow::{Context, Result};
 use chrono::Local;
 use rusqlite::{
@@ -11,10 +15,12 @@ use rusqlite::{
     Connection,
     OpenFlags,
     OptionalExtension,
+    TransactionBehavior,
 };
 use tracing::{debug, info, warn};

-/// Embed every numbered migration file here.
+/* ─── embedded migrations ─────────────────────────────────────────── */
+
 const MIGRATIONS: &[(&str, &str)] = &[
     ("0001_initial_schema.sql", include_str!("migrations/0001_initial_schema.sql")),
     ("0002_update_fts_and_triggers.sql", include_str!("migrations/0002_update_fts_and_triggers.sql")),
@@ -22,11 +28,7 @@ const MIGRATIONS: &[(&str, &str)] = &[
("0004_fix_hierarchical_tags_fts.sql", include_str!("migrations/0004_fix_hierarchical_tags_fts.sql")), ("0004_fix_hierarchical_tags_fts.sql", include_str!("migrations/0004_fix_hierarchical_tags_fts.sql")),
]; ];
/// Migrations that should *always* be re-run. /* ─── connection bootstrap ────────────────────────────────────────── */
/// We no longer need to force any, so leave it empty.
const FORCE_APPLY_MIGRATIONS: &[i64] = &[]; // <- was &[4]
/* ─── connection bootstrap ──────────────────────────────────────────── */
pub fn open<P: AsRef<Path>>(db_path: P) -> Result<Connection> { pub fn open<P: AsRef<Path>>(db_path: P) -> Result<Connection> {
let db_path_ref = db_path.as_ref(); let db_path_ref = db_path.as_ref();
@@ -36,16 +38,18 @@ pub fn open<P: AsRef<Path>>(db_path: P) -> Result<Connection> {
     conn.pragma_update(None, "journal_mode", "WAL")?;
     conn.pragma_update(None, "foreign_keys", "ON")?;

-    // Apply migrations (drops & recreates all FTS triggers)
+    // Wait up to 30 s for a competing writer before giving up
+    conn.busy_timeout(std::time::Duration::from_secs(30))?; // ← tweaked
+
     apply_migrations(&mut conn)?;

     Ok(conn)
 }

-/* ─── migration runner ──────────────────────────────────────────────── */
+/* ─── migration runner ────────────────────────────────────────────── */

 fn apply_migrations(conn: &mut Connection) -> Result<()> {
-    // Ensure schema_version table
+    // Ensure schema_version bookkeeping table exists
     conn.execute_batch(
         "CREATE TABLE IF NOT EXISTS schema_version (
             version INTEGER PRIMARY KEY,
@@ -53,18 +57,11 @@ fn apply_migrations(conn: &mut Connection) -> Result<()> {
);", );",
)?; )?;
// Legacy patch (ignore if exists) // Legacy patch ignore errors if column already exists
let _ = conn.execute("ALTER TABLE schema_version ADD COLUMN applied_on TEXT", []); let _ = conn.execute("ALTER TABLE schema_version ADD COLUMN applied_on TEXT", []);
// Force-remove migrations that should always be applied // Grab the write-lock up-front so migrations can run uninterrupted
for &version in FORCE_APPLY_MIGRATIONS { let tx = conn.transaction_with_behavior(TransactionBehavior::Immediate)?;
let rows_affected = conn.execute("DELETE FROM schema_version WHERE version = ?1", [version])?;
if rows_affected > 0 {
info!("Forcing reapplication of migration {}", version);
}
}
let tx = conn.transaction()?;
for (fname, sql) in MIGRATIONS { for (fname, sql) in MIGRATIONS {
let version: i64 = fname let version: i64 = fname
@@ -87,13 +84,8 @@ fn apply_migrations(conn: &mut Connection) -> Result<()> {
         }

         info!("applying migration {}", fname);
-        println!(
-            "\nSQL SCRIPT FOR MIGRATION: {}\nBEGIN SQL >>>\n{}\n<<< END SQL\n",
-            fname, sql
-        );
-
         tx.execute_batch(sql)
-            .with_context(|| format!("could not apply migration {}", fname))?;
+            .with_context(|| format!("could not apply migration {fname}"))?;

         tx.execute(
             "INSERT INTO schema_version (version, applied_on) VALUES (?1, ?2)",
@@ -103,40 +95,30 @@ fn apply_migrations(conn: &mut Connection) -> Result<()> {
     tx.commit()?;

-    // Verify that all migrations have been applied
-    let mut missing_migrations = Vec::new();
-    for (fname, _) in MIGRATIONS {
-        let version: i64 = fname
-            .split('_')
-            .next()
-            .and_then(|s| s.parse().ok())
-            .expect("migration filenames start with number");
-        let exists: bool = conn
+    // sanity – warn if any embedded migration got skipped
+    let mut missing = Vec::new();
+    for (fname, _) in MIGRATIONS {
+        let v: i64 = fname.split('_').next().unwrap().parse().unwrap();
+        let ok: bool = conn
             .query_row(
                 "SELECT 1 FROM schema_version WHERE version = ?1",
-                [version],
+                [v],
                 |_| Ok(true),
             )
             .optional()?
             .unwrap_or(false);
-
-        if !exists {
-            missing_migrations.push(version);
+        if !ok {
+            missing.push(v);
         }
     }
-
-    if !missing_migrations.is_empty() {
-        warn!(
-            "The following migrations were not applied: {:?}. This may indicate a problem with the migration system.",
-            missing_migrations
-        );
+    if !missing.is_empty() {
+        warn!("migrations not applied: {:?}", missing);
     }

     Ok(())
 }

-/* ─── helpers ───────────────────────────────────────────────────────── */
+/* ─── tag helpers ─────────────────────────────────────────────────── */

 pub fn ensure_tag_path(conn: &Connection, path: &str) -> Result<i64> {
     let mut parent: Option<i64> = None;
@@ -148,7 +130,7 @@ pub fn ensure_tag_path(conn: &Connection, path: &str) -> Result<i64> {
         let id: i64 = conn.query_row(
             "SELECT id FROM tags WHERE name = ?1 AND (parent_id IS ?2 OR parent_id = ?2)",
             params![segment, parent],
-            |row| row.get(0),
+            |r| r.get(0),
         )?;
         parent = Some(id);
     }
@@ -160,6 +142,8 @@ pub fn file_id(conn: &Connection, path: &str) -> Result<i64> {
         .map_err(|_| anyhow::anyhow!("file not indexed: {}", path))
 }

+/* ─── attributes ──────────────────────────────────────────────────── */
+
 pub fn upsert_attr(conn: &Connection, file_id: i64, key: &str, value: &str) -> Result<()> {
     conn.execute(
         r#"
@@ -172,7 +156,8 @@ pub fn upsert_attr(conn: &Connection, file_id: i64, key: &str, value: &str) -> R
     Ok(())
 }

-/// Add a typed link from one file to another.
+/* ─── links ───────────────────────────────────────────────────────── */
+
 pub fn add_link(conn: &Connection, src_file_id: i64, dst_file_id: i64, link_type: Option<&str>) -> Result<()> {
     conn.execute(
         "INSERT INTO links(src_file_id, dst_file_id, type)
@@ -183,7 +168,6 @@ pub fn add_link(conn: &Connection, src_file_id: i64, dst_file_id: i64, link_type
     Ok(())
 }

-/// Remove a typed link between two files.
 pub fn remove_link(conn: &Connection, src_file_id: i64, dst_file_id: i64, link_type: Option<&str>) -> Result<()> {
     conn.execute(
         "DELETE FROM links
@@ -195,29 +179,22 @@ pub fn remove_link(conn: &Connection, src_file_id: i64, dst_file_id: i64, link_t
     Ok(())
 }

-/// List all links for files matching a glob-style pattern.
-/// `direction` may be `"in"` (incoming), `"out"` (outgoing), or `None` (outgoing).
 pub fn list_links(
     conn: &Connection,
     pattern: &str,
     direction: Option<&str>,
     link_type: Option<&str>,
 ) -> Result<Vec<(String, String, Option<String>)>> {
-    // Convert glob '*' → SQL LIKE '%'
     let like_pattern = pattern.replace('*', "%");

-    // Find matching files
+    // Files matching pattern
     let mut stmt = conn.prepare("SELECT id, path FROM files WHERE path LIKE ?1")?;
-    let mut rows = stmt.query(params![like_pattern])?;
-    let mut files = Vec::new();
-    while let Some(row) = rows.next()? {
-        let id: i64 = row.get(0)?;
-        let path: String = row.get(1)?;
-        files.push((id, path));
-    }
+    let rows = stmt
+        .query_map(params![like_pattern], |r| Ok((r.get::<_, i64>(0)?, r.get::<_, String>(1)?)))?
+        .collect::<Result<Vec<_>, _>>()?;

-    let mut results = Vec::new();
-    for (file_id, file_path) in files {
+    let mut out = Vec::new();
+    for (fid, fpath) in rows {
         let (src_col, dst_col) = match direction {
             Some("in") => ("dst_file_id", "src_file_id"),
             _ => ("src_file_id", "dst_file_id"),
@@ -226,28 +203,29 @@ pub fn list_links(
         let sql = format!(
             "SELECT f2.path, l.type
                FROM links l
-               JOIN files f2 ON f2.id = l.{dst}
-              WHERE l.{src} = ?1
+               JOIN files f2 ON f2.id = l.{dst_col}
+              WHERE l.{src_col} = ?1
                 AND (?2 IS NULL OR l.type = ?2)",
-            src = src_col,
-            dst = dst_col,
         );

         let mut stmt2 = conn.prepare(&sql)?;
-        let mut rows2 = stmt2.query(params![file_id, link_type])?;
-        while let Some(r2) = rows2.next()? {
-            let other: String = r2.get(0)?;
-            let typ: Option<String> = r2.get(1)?;
-            results.push((file_path.clone(), other, typ));
+        let links = stmt2
+            .query_map(params![fid, link_type], |r| Ok((r.get::<_, String>(0)?, r.get::<_, Option<String>>(1)?)))?
+            .collect::<Result<Vec<_>, _>>()?;
+
+        for (other, typ) in links {
+            out.push((fpath.clone(), other, typ));
         }
     }
+    Ok(out)
+}

-    Ok(results)
-}
-
-/// Find all incoming links (backlinks) to files matching a pattern.
-pub fn find_backlinks(conn: &Connection, pattern: &str) -> Result<Vec<(String, Option<String>)>> {
-    let like_pattern = pattern.replace('*', "%");
+pub fn find_backlinks(
+    conn: &Connection,
+    pattern: &str,
+) -> Result<Vec<(String, Option<String>)>> {
+    let like = pattern.replace('*', "%");

     let mut stmt = conn.prepare(
         "SELECT f1.path, l.type
            FROM links l
@@ -255,17 +233,84 @@ pub fn find_backlinks(conn: &Connection, pattern: &str) -> Result<Vec<(String, O
            JOIN files f2 ON f2.id = l.dst_file_id
           WHERE f2.path LIKE ?1",
     )?;

-    let mut rows = stmt.query(params![like_pattern])?;
-    let mut result = Vec::new();
-    while let Some(row) = rows.next()? {
-        let src_path: String = row.get(0)?;
-        let typ: Option<String> = row.get(1)?;
-        result.push((src_path, typ));
-    }
-    Ok(result)
+    let rows = stmt.query_map([like], |r| {
+        Ok((r.get::<_, String>(0)?, r.get::<_, Option<String>>(1)?))
+    })?;
+
+    let out = rows.collect::<StdResult<Vec<_>, _>>()?; // rusqlite → anyhow via `?`
+    Ok(out)
 }

-/* ─── backup / restore ──────────────────────────────────────────────── */
+/* ─── NEW: collections helpers ────────────────────────────────────── */
+
+pub fn ensure_collection(conn: &Connection, name: &str) -> Result<i64> {
+    conn.execute(
+        "INSERT OR IGNORE INTO collections(name) VALUES (?1)",
+        params![name],
+    )?;
+    conn.query_row(
+        "SELECT id FROM collections WHERE name = ?1",
+        params![name],
+        |r| r.get(0),
+    )
+    .context("collection lookup failed")
+}
+
+pub fn add_file_to_collection(conn: &Connection, coll_id: i64, file_id: i64) -> Result<()> {
+    conn.execute(
+        "INSERT OR IGNORE INTO collection_files(collection_id, file_id)
+         VALUES (?1, ?2)",
+        params![coll_id, file_id],
+    )?;
+    Ok(())
+}
+
+pub fn list_collection(conn: &Connection, name: &str) -> Result<Vec<String>> {
+    let mut stmt = conn.prepare(
+        r#"SELECT f.path
+             FROM collections c
+             JOIN collection_files cf ON cf.collection_id = c.id
+             JOIN files f             ON f.id = cf.file_id
+            WHERE c.name = ?1
+            ORDER BY f.path"#,
+    )?;
+    let rows = stmt.query_map([name], |r| r.get::<_, String>(0))?;
+    let list = rows.collect::<StdResult<Vec<_>, _>>()?;
+    Ok(list)
+}
+
+/* ─── NEW: saved views (smart folders) ────────────────────────────── */
+
+pub fn save_view(conn: &Connection, name: &str, query: &str) -> Result<()> {
+    conn.execute(
+        "INSERT INTO views(name, query)
+         VALUES (?1, ?2)
+         ON CONFLICT(name) DO UPDATE SET query = excluded.query",
+        params![name, query],
+    )?;
+    Ok(())
+}
+
+pub fn list_views(conn: &Connection) -> Result<Vec<(String, String)>> {
+    let mut stmt = conn.prepare("SELECT name, query FROM views ORDER BY name")?;
+    let rows = stmt.query_map([], |r| Ok((r.get::<_, String>(0)?, r.get::<_, String>(1)?)))?;
+    let list = rows.collect::<StdResult<Vec<_>, _>>()?;
+    Ok(list)
+}
+
+pub fn view_query(conn: &Connection, name: &str) -> Result<String> {
+    conn.query_row(
+        "SELECT query FROM views WHERE name = ?1",
+        [name],
+        |r| r.get::<_, String>(0),
+    )
+    .context(format!("no view called '{name}'"))
+}
+
+/* ─── backup / restore helpers ────────────────────────────────────── */

 pub fn backup<P: AsRef<Path>>(db_path: P) -> Result<PathBuf> {
     let src = db_path.as_ref();
@@ -291,13 +336,14 @@ pub fn restore<P: AsRef<Path>>(backup_path: P, live_db_path: P) -> Result<()> {
     Ok(())
 }

+/* ─── tests ───────────────────────────────────────────────────────── */
+
 #[cfg(test)]
 mod tests {
     use super::*;

     #[test]
     fn migrations_apply_in_memory() {
-        // Opening an in-memory database should apply every migration without error.
-        let _conn = open(":memory:").expect("in-memory migrations should run cleanly");
+        open(":memory:").expect("all migrations apply");
     }
 }
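The pairing of `busy_timeout(30 s)` with an `IMMEDIATE` transaction is what lets two marlin processes race safely: the second one waits for the write-lock instead of failing straight away with SQLITE_BUSY. A minimal standalone sketch of the same pattern (assumed example, not the project's exact code):

    use rusqlite::{Connection, TransactionBehavior};

    fn migrate_locked(conn: &mut Connection) -> anyhow::Result<()> {
        // Retry for up to 30 s if another connection holds the write-lock.
        conn.busy_timeout(std::time::Duration::from_secs(30))?;
        // IMMEDIATE takes the write-lock at BEGIN, not at the first write,
        // so the whole migration runs without being interrupted.
        let tx = conn.transaction_with_behavior(TransactionBehavior::Immediate)?;
        // ... execute migration SQL here ...
        tx.commit()?;
        Ok(())
    }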

View File

@@ -5,9 +5,13 @@ use tracing_subscriber::{fmt, EnvFilter};
 /// Reads `RUST_LOG` for filtering, falls back to `info`.
 pub fn init() {
     let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));

+    // All tracing output (INFO, WARN, ERROR, …) now goes to *stderr* so the
+    // integration tests can assert on warnings / errors reliably.
     fmt()
-        .with_target(false)
-        .with_level(true)
-        .with_env_filter(filter)
+        .with_target(false)            // hide module targets
+        .with_level(true)              // include log level
+        .with_env_filter(filter)       // respect RUST_LOG
+        .with_writer(std::io::stderr)  // <-- NEW: send to stderr
         .init();
 }

View File

@@ -8,13 +8,19 @@ mod logging;
 mod scan;

 use anyhow::{Context, Result};
-use clap::{Parser, CommandFactory};
+use clap::{CommandFactory, Parser};
 use clap_complete::generate;
 use glob::Pattern;
 use rusqlite::params;
 use shellexpand;
 use shlex;
-use std::{env, io, path::PathBuf, process::Command};
+use std::{
+    env,
+    fs,
+    io,
+    path::{Path, PathBuf},
+    process::Command,
+};
 use tracing::{debug, error, info};
 use walkdir::WalkDir;
@@ -39,13 +45,13 @@ fn main() -> Result<()> {
     /* ── config & automatic backup ───────────────────────────────── */
-    let cfg = config::Config::load()?; // DB path etc.
+    let cfg = config::Config::load()?; // DB path, etc.

     match &args.command {
         Commands::Init | Commands::Backup | Commands::Restore { .. } => {}
         _ => match db::backup(&cfg.db_path) {
             Ok(path) => info!("Pre-command auto-backup created at {}", path.display()),
-            Err(e) => error!("Failed to create pre-command auto-backup: {}", e),
+            Err(e) => error!("Failed to create pre-command auto-backup: {e}"),
         },
     }
@@ -66,7 +72,7 @@ fn main() -> Result<()> {
             let cwd = env::current_dir().context("getting current directory")?;
             let count = scan::scan_directory(&mut conn, &cwd)
                 .context("initial scan failed")?;
-            info!("Initial scan complete – indexed/updated {} files", count);
+            info!("Initial scan complete – indexed/updated {count} files");
         }

         Commands::Scan { paths } => {
@@ -81,24 +87,30 @@ fn main() -> Result<()> {
         }

         Commands::Tag { pattern, tag_path } => apply_tag(&conn, &pattern, &tag_path)?,

         Commands::Attr { action } => match action {
             cli::AttrCmd::Set { pattern, key, value } => {
                 attr_set(&conn, &pattern, &key, &value)?
             }
             cli::AttrCmd::Ls { path } => attr_ls(&conn, &path)?,
         },

         Commands::Search { query, exec } => run_search(&conn, &query, exec)?,

         Commands::Backup => {
             let path = db::backup(&cfg.db_path)?;
             println!("Backup created: {}", path.display());
         }

         Commands::Restore { backup_path } => {
             drop(conn); // close handle before overwrite
-            db::restore(&backup_path, &cfg.db_path)
-                .with_context(|| format!("Failed to restore DB from {}", backup_path.display()))?;
+            db::restore(&backup_path, &cfg.db_path).with_context(|| {
+                format!("Failed to restore DB from {}", backup_path.display())
+            })?;
             println!("Restored DB from {}", backup_path.display());
-            db::open(&cfg.db_path)
-                .with_context(|| format!("Could not open restored DB at {}", cfg.db_path.display()))?;
+            db::open(&cfg.db_path).with_context(|| {
+                format!("Could not open restored DB at {}", cfg.db_path.display())
+            })?;
             info!("Successfully opened restored database.");
         }
@@ -117,7 +129,9 @@ fn main() -> Result<()> {
     Ok(())
 }

 /* ───────────────────────── helpers & sub-routines ───────────────── */

+/* ---------- TAGS ---------- */
+
 /// Apply a hierarchical tag to all files matching the glob pattern.
 fn apply_tag(conn: &rusqlite::Connection, pattern: &str, tag_path: &str) -> Result<()> {
@@ -142,7 +156,7 @@ fn apply_tag(conn: &rusqlite::Connection, pattern: &str, tag_path: &str) -> Resu
     let expanded = shellexpand::tilde(pattern).into_owned();
     let pat = Pattern::new(&expanded)
-        .with_context(|| format!("Invalid glob pattern `{}`", expanded))?;
+        .with_context(|| format!("Invalid glob pattern `{expanded}`"))?;
     let root = determine_scan_root(&expanded);

     let mut stmt_file = conn.prepare("SELECT id FROM files WHERE path = ?1")?;
@@ -157,12 +171,9 @@ fn apply_tag(conn: &rusqlite::Connection, pattern: &str, tag_path: &str) -> Resu
         .filter(|e| e.file_type().is_file())
     {
         let path_str = entry.path().to_string_lossy();
-        debug!("testing path: {}", path_str);
         if !pat.matches(&path_str) {
-            debug!("  → no match");
             continue;
         }
-        debug!("  → matched");

         match stmt_file.query_row(params![path_str.as_ref()], |r| r.get::<_, i64>(0)) {
             Ok(file_id) => {
@@ -175,8 +186,6 @@ fn apply_tag(conn: &rusqlite::Connection, pattern: &str, tag_path: &str) -> Resu
                 if newly {
                     info!(file = %path_str, tag = tag_path, "tagged");
                     count += 1;
-                } else {
-                    debug!(file = %path_str, tag = tag_path, "already tagged");
                 }
             }
             Err(rusqlite::Error::QueryReturnedNoRows) => {
@@ -188,24 +197,20 @@ fn apply_tag(conn: &rusqlite::Connection, pattern: &str, tag_path: &str) -> Resu
         }
     }

-    if count > 0 {
-        info!("Applied tag '{}' to {} file(s).", tag_path, count);
-    } else {
-        info!("No new files were tagged with '{}' (no matches or already tagged).", tag_path);
-    }
+    info!(
+        "Applied tag '{}' to {} file(s).",
+        tag_path, count
+    );
     Ok(())
 }

+/* ---------- ATTRIBUTES ---------- */
+
 /// Set a key=value attribute on all files matching the glob pattern.
-fn attr_set(
-    conn: &rusqlite::Connection,
-    pattern: &str,
-    key: &str,
-    value: &str,
-) -> Result<()> {
+fn attr_set(conn: &rusqlite::Connection, pattern: &str, key: &str, value: &str) -> Result<()> {
     let expanded = shellexpand::tilde(pattern).into_owned();
     let pat = Pattern::new(&expanded)
-        .with_context(|| format!("Invalid glob pattern `{}`", expanded))?;
+        .with_context(|| format!("Invalid glob pattern `{expanded}`"))?;
     let root = determine_scan_root(&expanded);

     let mut stmt_file = conn.prepare("SELECT id FROM files WHERE path = ?1")?;
@@ -217,17 +222,14 @@ fn attr_set(
         .filter(|e| e.file_type().is_file())
     {
         let path_str = entry.path().to_string_lossy();
-        debug!("testing attr path: {}", path_str);
         if !pat.matches(&path_str) {
-            debug!("  → no match");
             continue;
         }
-        debug!("  → matched");

         match stmt_file.query_row(params![path_str.as_ref()], |r| r.get::<_, i64>(0)) {
             Ok(file_id) => {
                 db::upsert_attr(conn, file_id, key, value)?;
-                info!(file = %path_str, key = key, value = value, "attr set");
+                info!(file = %path_str, key, value, "attr set");
                 count += 1;
             }
             Err(rusqlite::Error::QueryReturnedNoRows) => {
@@ -239,21 +241,20 @@ fn attr_set(
         }
     }

-    if count > 0 {
-        info!("Attribute '{}: {}' set on {} file(s).", key, value, count);
-    } else {
-        info!("No attributes set (no matches or not indexed).");
-    }
+    info!(
+        "Attribute '{}={}' set on {} file(s).",
+        key, value, count
+    );
     Ok(())
 }

 /// List attributes for a given file path.
-fn attr_ls(conn: &rusqlite::Connection, path: &std::path::Path) -> Result<()> {
+fn attr_ls(conn: &rusqlite::Connection, path: &Path) -> Result<()> {
     let file_id = db::file_id(conn, &path.to_string_lossy())?;
-    let mut stmt = conn.prepare(
-        "SELECT key, value FROM attributes WHERE file_id = ?1 ORDER BY key",
-    )?;
-    for row in stmt.query_map([file_id], |r| Ok((r.get::<_, String>(0)?, r.get::<_, String>(1)?)))?
+    let mut stmt =
+        conn.prepare("SELECT key, value FROM attributes WHERE file_id = ?1 ORDER BY key")?;
+    for row in stmt
+        .query_map([file_id], |r| Ok((r.get::<_, String>(0)?, r.get::<_, String>(1)?)))?
     {
         let (k, v) = row?;
         println!("{k} = {v}");
@@ -261,40 +262,43 @@ fn attr_ls(conn: &rusqlite::Connection, path: &std::path::Path) -> Result<()> {
     Ok(())
 }

-/// Build and run an FTS5 search query, with optional exec.
-///   "tag:foo/bar"  →  tags_text:foo AND tags_text:bar
-///   "attr:k=v"     →  attrs_text:k AND attrs_text:v
+/* ---------- SEARCH ---------- */
+
+/// Run an FTS5 search, optionally piping each hit through `exec`.
+/// Falls back to a simple substring scan (path + ≤ 64 kB file contents)
+/// when the FTS index yields no rows.
 fn run_search(conn: &rusqlite::Connection, raw_query: &str, exec: Option<String>) -> Result<()> {
-    let mut fts_query_parts = Vec::new();
-    let parts = shlex::split(raw_query).unwrap_or_else(|| vec![raw_query.to_string()]);
+    // Build the FTS MATCH expression
+    let mut parts = Vec::new();
+    let toks = shlex::split(raw_query).unwrap_or_else(|| vec![raw_query.to_string()]);

-    for part in parts {
-        if ["AND", "OR", "NOT"].contains(&part.as_str()) {
-            fts_query_parts.push(part);
-        } else if let Some(tag) = part.strip_prefix("tag:") {
-            let segments: Vec<&str> = tag.split('/').filter(|s| !s.is_empty()).collect();
-            for (i, seg) in segments.iter().enumerate() {
+    for tok in toks {
+        if ["AND", "OR", "NOT"].contains(&tok.as_str()) {
+            parts.push(tok);
+        } else if let Some(tag) = tok.strip_prefix("tag:") {
+            for (i, seg) in tag.split('/').filter(|s| !s.is_empty()).enumerate() {
                 if i > 0 {
-                    fts_query_parts.push("AND".into());
+                    parts.push("AND".into());
                 }
-                fts_query_parts.push(format!("tags_text:{}", escape_fts_query_term(seg)));
+                parts.push(format!("tags_text:{}", escape_fts(seg)));
             }
-        } else if let Some(attr) = part.strip_prefix("attr:") {
+        } else if let Some(attr) = tok.strip_prefix("attr:") {
             let mut kv = attr.splitn(2, '=');
             let key = kv.next().unwrap();
-            if let Some(value) = kv.next() {
-                fts_query_parts.push(format!("attrs_text:{}", escape_fts_query_term(key)));
-                fts_query_parts.push("AND".into());
-                fts_query_parts.push(format!("attrs_text:{}", escape_fts_query_term(value)));
+            if let Some(val) = kv.next() {
+                parts.push(format!("attrs_text:{}", escape_fts(key)));
+                parts.push("AND".into());
+                parts.push(format!("attrs_text:{}", escape_fts(val)));
             } else {
-                fts_query_parts.push(format!("attrs_text:{}", escape_fts_query_term(key)));
+                parts.push(format!("attrs_text:{}", escape_fts(key)));
             }
         } else {
-            fts_query_parts.push(escape_fts_query_term(&part));
+            parts.push(escape_fts(&tok));
         }
     }

-    let fts_expr = fts_query_parts.join(" ");
-    debug!("Constructed FTS MATCH expression: {}", fts_expr);
+    let fts_expr = parts.join(" ");
+    debug!("FTS MATCH expression: {fts_expr}");

+    // ---------- primary FTS query ----------
     let mut stmt = conn.prepare(
         r#"
         SELECT f.path
@@ -304,15 +308,69 @@ fn run_search(conn: &rusqlite::Connection, raw_query: &str, exec: Option<String>
          ORDER BY rank
         "#,
     )?;
-    let hits: Vec<String> = stmt
-        .query_map(params![fts_expr], |row| row.get(0))?
+    let mut hits: Vec<String> = stmt
+        .query_map(params![fts_expr], |r| r.get::<_, String>(0))?
         .filter_map(Result::ok)
         .collect();

+    // ---------- graceful fallback ----------
+    if hits.is_empty() && !raw_query.contains(':') {
+        hits = naive_substring_search(conn, raw_query)?;
+    }
+
+    // ---------- output / exec ----------
     if let Some(cmd_tpl) = exec {
-        let mut ran_without_placeholder = false;
-
-        if hits.is_empty() && !cmd_tpl.contains("{}") {
-            if let Some(mut parts) = shlex::split(&cmd_tpl) {
+        run_exec(&hits, &cmd_tpl)?;
+    } else {
+        if hits.is_empty() {
+            eprintln!(
+                "No matches for query: `{raw_query}` (FTS expression: `{fts_expr}`)"
+            );
+        } else {
+            for p in hits {
+                println!("{p}");
+            }
+        }
+    }
+    Ok(())
+}
+
+/// Simple, case-insensitive substring scan over paths and (small) file bodies.
+fn naive_substring_search(conn: &rusqlite::Connection, term: &str) -> Result<Vec<String>> {
+    let term_lc = term.to_lowercase();
+    let mut stmt = conn.prepare("SELECT path FROM files")?;
+    let rows = stmt.query_map([], |r| r.get::<_, String>(0))?;
+
+    let mut out = Vec::new();
+    for p in rows {
+        let p = p?;
+        if p.to_lowercase().contains(&term_lc) {
+            out.push(p.clone());
+            continue;
+        }
+        // Only inspect small files to stay fast
+        if let Ok(meta) = fs::metadata(&p) {
+            if meta.len() > 64_000 {
+                continue;
+            }
+        }
+        if let Ok(content) = fs::read_to_string(&p) {
+            if content.to_lowercase().contains(&term_lc) {
+                out.push(p);
+            }
+        }
+    }
+    Ok(out)
+}
+
+/// Helper: run an external command template on every hit.
+fn run_exec(paths: &[String], cmd_tpl: &str) -> Result<()> {
+    let mut ran_without_placeholder = false;
+
+    if paths.is_empty() && !cmd_tpl.contains("{}") {
+        if let Some(mut parts) = shlex::split(cmd_tpl) {
             if !parts.is_empty() {
                 let prog = parts.remove(0);
                 let status = Command::new(&prog).args(&parts).status()?;
@@ -323,43 +381,36 @@ fn run_search(conn: &rusqlite::Connection, raw_query: &str, exec: Option<String>
             }
             ran_without_placeholder = true;
         }
     }

     if !ran_without_placeholder {
-        for path in hits {
-            let quoted = shlex::try_quote(&path).unwrap_or(path.clone().into());
-            let cmd_final = if cmd_tpl.contains("{}") {
+        for p in paths {
+            let quoted = shlex::try_quote(p).unwrap_or_else(|_| p.into());
+            let final_cmd = if cmd_tpl.contains("{}") {
                 cmd_tpl.replace("{}", &quoted)
             } else {
-                format!("{} {}", cmd_tpl, &quoted)
+                format!("{cmd_tpl} {quoted}")
             };
-            if let Some(mut parts) = shlex::split(&cmd_final) {
+            if let Some(mut parts) = shlex::split(&final_cmd) {
                 if parts.is_empty() {
                     continue;
                 }
                 let prog = parts.remove(0);
                 let status = Command::new(&prog).args(&parts).status()?;
                 if !status.success() {
-                    error!(file=%path, command=%cmd_final, code=?status.code(), "command failed");
+                    error!(file = %p, command = %final_cmd, code = ?status.code(), "command failed");
                 }
             }
         }
     }
-    } else {
-        if hits.is_empty() {
-            eprintln!("No matches for query: `{}` (FTS expression: `{}`)", raw_query, fts_expr);
-        } else {
-            for p in hits {
-                println!("{}", p);
-            }
-        }
-    }

     Ok(())
 }

-/// Quote terms for FTS when needed.
-fn escape_fts_query_term(term: &str) -> String {
+/* ---------- misc helpers ---------- */
+
+fn escape_fts(term: &str) -> String {
     if term.contains(|c: char| c.is_whitespace() || "-:()\"".contains(c))
-        || ["AND", "OR", "NOT", "NEAR"].contains(&term.to_uppercase().as_str())
+        || ["AND", "OR", "NOT", "NEAR"]
+            .contains(&term.to_uppercase().as_str())
     {
         format!("\"{}\"", term.replace('"', "\"\""))
     } else {
@@ -369,20 +420,22 @@ fn escape_fts_query_term(term: &str) -> String {
 /// Determine a filesystem root to limit recursive walking.
 fn determine_scan_root(pattern: &str) -> PathBuf {
-    let wildcard_pos = pattern.find(|c| c == '*' || c == '?' || c == '[').unwrap_or(pattern.len());
-    let prefix = &pattern[..wildcard_pos];
-    let mut root = PathBuf::from(prefix);
+    let first_wild = pattern
+        .find(|c| matches!(c, '*' | '?' | '['))
+        .unwrap_or(pattern.len());
+    let mut root = PathBuf::from(&pattern[..first_wild]);

     while root
         .as_os_str()
         .to_string_lossy()
-        .contains(|c| ['*', '?', '['].contains(&c))
+        .contains(|c| matches!(c, '*' | '?' | '['))
     {
-        if let Some(parent) = root.parent() {
-            root = parent.to_path_buf();
-        } else {
-            root = PathBuf::from(".");
-            break;
-        }
-    }
+        root = root.parent().map(Path::to_path_buf).unwrap_or_default();
+    }

-    root
+    if root.as_os_str().is_empty() {
+        PathBuf::from(".")
+    } else {
+        root
+    }
 }
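Expected behaviour of the rewritten `determine_scan_root`, as illustrative assertions (the inputs are assumptions):

    assert_eq!(determine_scan_root("src/**/*.rs"), PathBuf::from("src"));
    assert_eq!(determine_scan_root("*.rs"), PathBuf::from("."));          // bare glob → cwd
    assert_eq!(determine_scan_root("/etc/hosts"), PathBuf::from("/etc/hosts")); // no wildcard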

Binary file not shown.

View File

@@ -1,17 +1,18 @@
-//! End-to-end smoke-tests for the marlin binary.
+//! End-to-end "happy path" smoke-tests for the `marlin` binary.
 //!
-//! Run with `cargo test --test e2e` or let CI invoke `cargo test`.
+//! Run with `cargo test --test e2e` (CI does) or `cargo test`.

 use assert_cmd::prelude::*;
 use predicates::prelude::*;
 use std::{fs, path::PathBuf, process::Command};
 use tempfile::tempdir;

-/// Absolute path to the `marlin` binary Cargo just built for this test run.
+/// Absolute path to the freshly-built `marlin` binary.
 fn marlin_bin() -> PathBuf {
     PathBuf::from(env!("CARGO_BIN_EXE_marlin"))
 }

+/// Create the demo directory structure and seed files.
 fn spawn_demo_tree(root: &PathBuf) {
     fs::create_dir_all(root.join("Projects/Alpha")).unwrap();
     fs::create_dir_all(root.join("Projects/Beta")).unwrap();
@@ -27,77 +28,94 @@ fn spawn_demo_tree(root: &PathBuf) {
fs::write(root.join("Reports/Q1.pdf"), "PDF\n").unwrap(); fs::write(root.join("Reports/Q1.pdf"), "PDF\n").unwrap();
} }
fn run(cmd: &mut Command) -> assert_cmd::assert::Assert { /// Shorthand for “run and must succeed”.
fn ok(cmd: &mut Command) -> assert_cmd::assert::Assert {
cmd.assert().success() cmd.assert().success()
} }
#[test] #[test]
fn full_cli_flow() -> Result<(), Box<dyn std::error::Error>> { fn full_cli_flow() -> Result<(), Box<dyn std::error::Error>> {
// 1. sandbox /* ── 1 ░ sandbox ───────────────────────────────────────────── */
let tmp = tempdir()?;
let tmp = tempdir()?; // wiped on drop
let demo_dir = tmp.path().join("marlin_demo"); let demo_dir = tmp.path().join("marlin_demo");
spawn_demo_tree(&demo_dir); spawn_demo_tree(&demo_dir);
// 2. init (auto-scan cwd) let db_path = demo_dir.join("index.db");
run(Command::new(marlin_bin())
// Helper to spawn a fresh `marlin` Command with the DB env-var set
let marlin = || {
let mut c = Command::new(marlin_bin());
c.env("MARLIN_DB_PATH", &db_path);
c
};
/* ── 2 ░ init ( auto-scan cwd ) ───────────────────────────── */
ok(marlin()
.current_dir(&demo_dir) .current_dir(&demo_dir)
.arg("init")); .arg("init"));
// 3. tag & attr /* ── 3 ░ tag & attr demos ─────────────────────────────────── */
run(Command::new(marlin_bin())
ok(marlin()
.arg("tag") .arg("tag")
.arg(format!("{}/Projects/**/*.md", demo_dir.display())) .arg(format!("{}/Projects/**/*.md", demo_dir.display()))
.arg("project/md")); .arg("project/md"));
run(Command::new(marlin_bin()) ok(marlin()
.arg("attr") .arg("attr")
.arg("set") .arg("set")
.arg(format!("{}/Reports/*.pdf", demo_dir.display())) .arg(format!("{}/Reports/*.pdf", demo_dir.display()))
.arg("reviewed") .arg("reviewed")
.arg("yes")); .arg("yes"));
// 4. search expectations /* ── 4 ░ quick search sanity checks ───────────────────────── */
Command::new(marlin_bin())
.arg("search") marlin()
.arg("TODO") .arg("search").arg("TODO")
.assert() .assert()
.stdout(predicate::str::contains("TODO.txt")); .stdout(predicate::str::contains("TODO.txt"));
Command::new(marlin_bin()) marlin()
.arg("search") .arg("search").arg("attr:reviewed=yes")
.arg("attr:reviewed=yes")
.assert() .assert()
.stdout(predicate::str::contains("Q1.pdf")); .stdout(predicate::str::contains("Q1.pdf"));
// 5. link & backlinks /* ── 5 ░ link flow & backlinks ────────────────────────────── */
let foo = demo_dir.join("foo.txt"); let foo = demo_dir.join("foo.txt");
let bar = demo_dir.join("bar.txt"); let bar = demo_dir.join("bar.txt");
fs::write(&foo, "")?; fs::write(&foo, "")?;
fs::write(&bar, "")?; fs::write(&bar, "")?;
run(Command::new(marlin_bin()).arg("scan").arg(&demo_dir));
run(Command::new(marlin_bin()) ok(marlin().arg("scan").arg(&demo_dir));
ok(marlin()
.arg("link").arg("add") .arg("link").arg("add")
.arg(&foo).arg(&bar)); .arg(&foo).arg(&bar));
Command::new(marlin_bin())
marlin()
.arg("link").arg("backlinks").arg(&bar) .arg("link").arg("backlinks").arg(&bar)
.assert() .assert()
.stdout(predicate::str::contains("foo.txt")); .stdout(predicate::str::contains("foo.txt"));
// 6. backup / restore round-trip /* ── 6 ░ backup → delete DB → restore ────────────────────── */
let backup_path = String::from_utf8( let backup_path = String::from_utf8(
Command::new(marlin_bin()).arg("backup").output()?.stdout marlin().arg("backup").output()?.stdout
)?; )?;
let backup_file = backup_path.split_whitespace().last().unwrap(); let backup_file = backup_path.split_whitespace().last().unwrap();
// wipe DB file fs::remove_file(&db_path)?; // simulate corruption
std::fs::remove_file(dirs::data_dir().unwrap().join("marlin/index.db"))?; ok(marlin().arg("restore").arg(backup_file)); // restore
run(Command::new(marlin_bin()).arg("restore").arg(backup_file));
// sanity: search still works // Search must still work afterwards
Command::new(marlin_bin()) marlin()
.arg("search").arg("TODO") .arg("search").arg("TODO")
.assert() .assert()
.stdout(predicate::str::contains("TODO.txt")); .stdout(predicate::str::contains("TODO.txt"));
Ok(()) Ok(())
} }
81
tests/neg.rs Normal file
View File
@@ -0,0 +1,81 @@
//! Negative-path integration tests (“should fail / warn”).
use predicates::str;
use tempfile::tempdir;
mod util;
use util::marlin;
/* ───────────────────────── LINKS ─────────────────────────────── */
#[test]
fn link_non_indexed_should_fail() {
let tmp = tempdir().unwrap();
marlin(&tmp).current_dir(tmp.path()).arg("init").assert().success();
std::fs::write(tmp.path().join("foo.txt"), "").unwrap();
std::fs::write(tmp.path().join("bar.txt"), "").unwrap();
marlin(&tmp)
.current_dir(tmp.path())
.args([
"link", "add",
&tmp.path().join("foo.txt").to_string_lossy(),
&tmp.path().join("bar.txt").to_string_lossy()
])
.assert()
.failure()
.stderr(str::contains("file not indexed"));
}
/* ───────────────────────── ATTR ─────────────────────────────── */
#[test]
fn attr_set_on_non_indexed_file_should_warn() {
let tmp = tempdir().unwrap();
marlin(&tmp).current_dir(tmp.path()).arg("init").assert().success();
let ghost = tmp.path().join("ghost.txt");
std::fs::write(&ghost, "").unwrap();
marlin(&tmp)
.args(["attr","set",
&ghost.to_string_lossy(),"foo","bar"])
.assert()
.success() // exits 0
.stderr(str::contains("not indexed"));
}
/* ───────────────────── COLLECTIONS ───────────────────────────── */
#[test]
fn coll_add_unknown_collection_should_fail() {
let tmp = tempdir().unwrap();
let file = tmp.path().join("doc.txt");
std::fs::write(&file, "").unwrap();
marlin(&tmp).current_dir(tmp.path()).arg("init").assert().success();
marlin(&tmp)
.args(["coll","add","nope",&file.to_string_lossy()])
.assert()
.failure();
}
/* ───────────────────── RESTORE (bad file) ───────────────────── */
#[test]
fn restore_with_nonexistent_backup_should_fail() {
let tmp = tempdir().unwrap();
// create an empty DB first
marlin(&tmp).arg("init").assert().success();
marlin(&tmp)
.args(["restore", "/definitely/not/here.db"])
.assert()
.failure()
.stderr(str::contains("Failed to restore"));
}
171
tests/pos.rs Normal file
View File
@@ -0,0 +1,171 @@
//! Positive-path integration checks for every sub-command
//! that already has real logic behind it.
mod util;
use util::marlin;
use predicates::{prelude::*, str}; // brings `PredicateBooleanExt::and`
use std::fs;
use tempfile::tempdir;
/* ─────────────────────────── TAG ─────────────────────────────── */
#[test]
fn tag_should_add_hierarchical_tag_and_search_finds_it() {
let tmp = tempdir().unwrap();
let file = tmp.path().join("foo.md");
fs::write(&file, "# test\n").unwrap();
marlin(&tmp).current_dir(tmp.path()).arg("init").assert().success();
marlin(&tmp)
.args(["tag", file.to_str().unwrap(), "project/md"])
.assert().success();
marlin(&tmp)
.args(["search", "tag:project/md"])
.assert()
.success()
.stdout(str::contains("foo.md"));
}
/* ─────────────────────────── ATTR ────────────────────────────── */
#[test]
fn attr_set_then_ls_roundtrip() {
let tmp = tempdir().unwrap();
let file = tmp.path().join("report.pdf");
fs::write(&file, "%PDF-1.4\n").unwrap();
marlin(&tmp).current_dir(tmp.path()).arg("init").assert().success();
marlin(&tmp)
.args(["attr", "set", file.to_str().unwrap(), "reviewed", "yes"])
.assert().success();
marlin(&tmp)
.args(["attr", "ls", file.to_str().unwrap()])
.assert()
.success()
.stdout(str::contains("reviewed = yes"));
}
/* ─────────────────────── COLLECTIONS ────────────────────────── */
#[test]
fn coll_create_add_and_list() {
let tmp = tempdir().unwrap();
let a = tmp.path().join("a.txt");
let b = tmp.path().join("b.txt");
fs::write(&a, "").unwrap();
fs::write(&b, "").unwrap();
marlin(&tmp).current_dir(tmp.path()).arg("init").assert().success();
marlin(&tmp).args(["coll", "create", "Set"]).assert().success();
for f in [&a, &b] {
marlin(&tmp).args(["coll", "add", "Set", f.to_str().unwrap()]).assert().success();
}
marlin(&tmp)
.args(["coll", "list", "Set"])
.assert()
.success()
.stdout(str::contains("a.txt").and(str::contains("b.txt")));
}
/* ─────────────────────────── VIEWS ───────────────────────────── */
#[test]
fn view_save_list_and_exec() {
let tmp = tempdir().unwrap();
let todo = tmp.path().join("TODO.txt");
fs::write(&todo, "remember the milk\n").unwrap();
marlin(&tmp).current_dir(tmp.path()).arg("init").assert().success();
// save & list
marlin(&tmp).args(["view", "save", "tasks", "milk"]).assert().success();
marlin(&tmp)
.args(["view", "list"])
.assert()
.success()
.stdout(str::contains("tasks: milk"));
// exec
marlin(&tmp)
.args(["view", "exec", "tasks"])
.assert()
.success()
.stdout(str::contains("TODO.txt"));
}
/* ─────────────────────────── LINKS ───────────────────────────── */
#[test]
fn link_add_rm_and_list() {
let tmp = tempdir().unwrap();
let foo = tmp.path().join("foo.txt");
let bar = tmp.path().join("bar.txt");
fs::write(&foo, "").unwrap();
fs::write(&bar, "").unwrap();
// handy closure
let mc = || marlin(&tmp);
mc().current_dir(tmp.path()).arg("init").assert().success();
mc().args(["scan", tmp.path().to_str().unwrap()]).assert().success();
// add
mc().args(["link", "add", foo.to_str().unwrap(), bar.to_str().unwrap()])
.assert().success();
// list (outgoing default)
mc().args(["link", "list", foo.to_str().unwrap()])
.assert().success()
.stdout(str::contains("foo.txt").and(str::contains("bar.txt")));
// remove
mc().args(["link", "rm", foo.to_str().unwrap(), bar.to_str().unwrap()])
.assert().success();
// list now empty
mc().args(["link", "list", foo.to_str().unwrap()])
.assert().success()
.stdout(str::is_empty());
}
/* ─────────────────────── SCAN (multi-path) ───────────────────── */
#[test]
fn scan_with_multiple_paths_indexes_all() {
let tmp = tempdir().unwrap();
let dir_a = tmp.path().join("A");
let dir_b = tmp.path().join("B");
std::fs::create_dir_all(&dir_a).unwrap();
std::fs::create_dir_all(&dir_b).unwrap();
let f1 = dir_a.join("one.txt");
let f2 = dir_b.join("two.txt");
fs::write(&f1, "").unwrap();
fs::write(&f2, "").unwrap();
marlin(&tmp).current_dir(tmp.path()).arg("init").assert().success();
// multi-path scan
marlin(&tmp)
.args(["scan", dir_a.to_str().unwrap(), dir_b.to_str().unwrap()])
.assert().success();
// both files findable
for term in ["one.txt", "two.txt"] {
marlin(&tmp).args(["search", term])
.assert()
.success()
.stdout(str::contains(term));
}
}
View File
@@ -62,7 +62,7 @@ If you wire **“cargo test --all”** into CI (GitHub Actions, GitLab, etc.), p
```bash
git pull && cargo build --release &&
sudo install -Dm755 target/release/marlin /usr/local/bin/marlin &&
cargo test --all -- --nocapture
```
Stick that in a shell alias (`alias marlin-ci='…'`) and you've got a 5-second upgrade-and-verify loop.
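Spelled out, the alias body is just the block above joined onto one line — a sketch, with `marlin-ci` as the example name:

```bash
# Sketch: the alias body is the upgrade-and-verify pipeline from the block above.
alias marlin-ci='git pull && cargo build --release && sudo install -Dm755 target/release/marlin /usr/local/bin/marlin && cargo test --all -- --nocapture'
```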
23
tests/util.rs Normal file
View File
@@ -0,0 +1,23 @@
//! tests/util.rs
//! Small helpers shared across integration tests.
use std::path::{Path, PathBuf};
use tempfile::TempDir;
use assert_cmd::Command;
/// Absolute path to the freshly-built `marlin` binary.
pub fn bin() -> PathBuf {
PathBuf::from(env!("CARGO_BIN_EXE_marlin"))
}
/// Build a `Command` for `marlin` whose `MARLIN_DB_PATH` is
/// `<tmp>/index.db`.
///
/// Each call yields a brand-new `Command`, so callers can freely add
/// arguments, change the working directory, etc., without affecting
/// other invocations.
pub fn marlin(tmp: &TempDir) -> Command {
let db_path: &Path = &tmp.path().join("index.db");
let mut cmd = Command::new(bin());
cmd.env("MARLIN_DB_PATH", db_path);
cmd
}
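Call sites stay one-liners. A minimal sketch of a test built on the helper — hypothetical, but mirroring the pattern `tests/pos.rs` uses above:

```rust
// tests/smoke.rs (illustrative file name)
mod util;
use util::marlin;

use tempfile::tempdir;

#[test]
fn init_succeeds_in_a_fresh_sandbox() {
    let tmp = tempdir().unwrap();

    // Each `marlin(&tmp)` call is an independent `Command`, but every
    // one of them targets the same `<tmp>/index.db` via MARLIN_DB_PATH.
    marlin(&tmp).current_dir(tmp.path()).arg("init").assert().success();
}
```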