mirror of https://github.com/PR0M3TH3AN/Marlin.git (synced 2025-09-08 07:08:44 +00:00)

commit: update
bar.txt → .github/workflows/ci.yml (vendored, 0 changes)
Cargo.lock (generated, 41 changes)
@@ -440,6 +440,23 @@ version = "0.2.172"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
 
+[[package]]
+name = "libmarlin"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "chrono",
+ "directories",
+ "glob",
+ "rusqlite",
+ "serde_json",
+ "shellexpand",
+ "shlex",
+ "tracing",
+ "tracing-subscriber",
+ "walkdir",
+]
+
 [[package]]
 name = "libredox"
 version = "0.1.3"
@@ -474,7 +491,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
 
 [[package]]
-name = "marlin"
+name = "marlin-cli"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "assert_cmd",
+ "clap",
+ "clap_complete",
+ "dirs 5.0.1",
+ "glob",
+ "libmarlin",
+ "predicates",
+ "rusqlite",
+ "serde_json",
+ "shellexpand",
+ "shlex",
+ "tempfile",
+ "tracing",
+ "tracing-subscriber",
+ "walkdir",
+]
+
+[[package]]
+name = "marlin-tui"
 version = "0.1.0"
 dependencies = [
  "anyhow",
Cargo.toml (38 changes)
@@ -1,31 +1,9 @@
-[package]
-name = "marlin"
-version = "0.1.0"
-edition = "2021"
-
-[dependencies]
-anyhow = "1"
-clap = { version = "4", features = ["derive"] }
-directories = "5"
-glob = "0.3"
-rusqlite = { version = "0.31", features = ["bundled", "backup"] }
-tracing = "0.1"
-tracing-subscriber = { version = "0.3", features = ["fmt", "env-filter"] }
-walkdir = "2.5"
-shlex = "1.3"
-chrono = "0.4"
-shellexpand = "3.1"
-clap_complete = "4.1"
-serde_json = { version = "1", optional = true } # <-- NEW
-
-[dev-dependencies]
-assert_cmd = "2"
-predicates = "3"
-tempfile = "3"
-dirs = "5" # cross-platform data dir helper
-
-[features]
-# The CLI prints JSON only when this feature is enabled.
-# Having the feature listed silences the `unexpected cfg` lint even
-# when you don’t turn it on.
-json = ["serde_json"]
+[workspace]
+members = [
+    "libmarlin",
+    "cli-bin",
+    "tui-bin",
+]
+
+# optionally, share common dependency versions here:
+# [workspace.dependencies]
cli-bin/Cargo.toml (new file, 33 lines)
@@ -0,0 +1,33 @@
+[package]
+name = "marlin-cli"
+version = "0.1.0"
+edition = "2021"
+publish = false # binary crate, not meant for crates.io
+
+[[bin]]
+name = "marlin" # cargo install/run -> `marlin`
+path = "src/main.rs"
+
+[dependencies]
+libmarlin = { path = "../libmarlin" } # ← core library
+anyhow = "1"
+clap = { version = "4", features = ["derive"] }
+clap_complete = "4.1"
+glob = "0.3"
+rusqlite = { version = "0.31", features = ["bundled", "backup"] }
+shellexpand = "3.1"
+shlex = "1.3"
+tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["fmt", "env-filter"] }
+walkdir = "2.5"
+serde_json = { version = "1", optional = true }
+
+[dev-dependencies]
+assert_cmd = "2"
+predicates = "3"
+tempfile = "3"
+dirs = "5"
+
+[features]
+# Enable JSON output with `--features json`
+json = ["serde_json"]
cli-bin/build.rs (new file, 11 lines)
@@ -0,0 +1,11 @@
+// cli-bin/build.rs
+//
+// The CLI currently needs no build-time code-generation, but Cargo
+// insists on rerunning any build-script each compile. Tell it to
+// rebuild only if this file itself changes.
+
+fn main() {
+    // If you later add code-gen (e.g. embed completions or YAML), add
+    // further `cargo:rerun-if-changed=<path>` lines here.
+    println!("cargo:rerun-if-changed=build.rs");
+}
@@ -3,10 +3,8 @@
 use clap::{Args, Subcommand};
 use rusqlite::Connection;
 
-use crate::{
-    cli::Format,
-    db,
-};
+use crate::cli::Format; // local enum for text / json output
+use libmarlin::db;      // core DB helpers from the library crate
 
 #[derive(Subcommand, Debug)]
 pub enum CollCmd {
@@ -1,9 +1,10 @@
-// src/cli/link.rs
+//! src/cli/link.rs – manage typed relationships between files
 
-use crate::db;
 use clap::{Subcommand, Args};
 use rusqlite::Connection;
-use crate::cli::Format;
+
+use crate::cli::Format; // output selector
+use libmarlin::db;      // ← switched from `crate::db`
 
 #[derive(Subcommand, Debug)]
 pub enum LinkCmd {
@@ -6,7 +6,8 @@ use anyhow::Result;
 use clap::{Args, Subcommand};
 use rusqlite::Connection;
 
-use crate::{cli::Format, db};
+use crate::cli::Format; // output selector stays local
+use libmarlin::db;      // ← path switched from `crate::db`
 
 #[derive(Subcommand, Debug)]
 pub enum ViewCmd {
@@ -1,24 +1,33 @@
-// src/main.rs
+//! Marlin CLI entry-point (post crate-split)
+//!
+//! All heavy lifting now lives in the `libmarlin` crate; this file
+//! handles argument parsing, logging, orchestration and the few
+//! helpers that remain CLI-specific.
 
 #![deny(warnings)]
 
-mod cli;
-mod config;
-mod db;
-mod logging;
-mod scan;
+mod cli; // sub-command definitions and argument structs
+
+/* ── shared modules re-exported from libmarlin ─────────────────── */
+use libmarlin::{
+    config,
+    db,
+    logging,
+    scan,
+    utils::determine_scan_root,
+};
 
 use anyhow::{Context, Result};
 use clap::{CommandFactory, Parser};
 use clap_complete::generate;
 use glob::Pattern;
-use rusqlite::params;
 use shellexpand;
 use shlex;
 use std::{
     env,
     fs,
     io,
-    path::{Path, PathBuf},
+    path::Path,
     process::Command,
 };
 use tracing::{debug, error, info};
@@ -27,7 +36,7 @@ use walkdir::WalkDir;
 use cli::{Cli, Commands};
 
 fn main() -> Result<()> {
-    /* ── CLI parsing & logging ────────────────────────────────────── */
+    /* ── CLI parsing & logging ────────────────────────────────── */
 
     let args = Cli::parse();
     if args.verbose {
@@ -35,7 +44,7 @@ fn main() -> Result<()> {
     }
     logging::init();
 
-    /* ── shell-completion shortcut ───────────────────────────────── */
+    /* ── shell-completion shortcut ────────────────────────────── */
 
     if let Commands::Completions { shell } = &args.command {
         let mut cmd = Cli::command();
@@ -43,63 +52,65 @@ fn main() -> Result<()> {
         return Ok(());
     }
 
-    /* ── config & automatic backup ───────────────────────────────── */
+    /* ── config & automatic backup ───────────────────────────── */
 
-    let cfg = config::Config::load()?; // DB path, etc.
+    let cfg = config::Config::load()?; // resolves DB path
 
     match &args.command {
         Commands::Init | Commands::Backup | Commands::Restore { .. } => {}
         _ => match db::backup(&cfg.db_path) {
-            Ok(path) => info!("Pre-command auto-backup created at {}", path.display()),
+            Ok(p) => info!("Pre-command auto-backup created at {}", p.display()),
             Err(e) => error!("Failed to create pre-command auto-backup: {e}"),
         },
     }
 
-    /* ── open DB (runs migrations if needed) ─────────────────────── */
+    /* ── open DB (runs migrations) ───────────────────────────── */
 
     let mut conn = db::open(&cfg.db_path)?;
 
-    /* ── command dispatch ────────────────────────────────────────── */
+    /* ── command dispatch ────────────────────────────────────── */
 
     match args.command {
-        Commands::Completions { .. } => {} // already handled
+        Commands::Completions { .. } => {} // handled above
 
+        /* ---- init ------------------------------------------------ */
         Commands::Init => {
            info!("Database initialised at {}", cfg.db_path.display());
 
-            // Always (re-)scan the current directory so even an existing DB
-            // picks up newly created files in the working tree.
             let cwd = env::current_dir().context("getting current directory")?;
             let count = scan::scan_directory(&mut conn, &cwd)
                 .context("initial scan failed")?;
             info!("Initial scan complete – indexed/updated {count} files");
         }
 
+        /* ---- scan ------------------------------------------------ */
         Commands::Scan { paths } => {
             let scan_paths = if paths.is_empty() {
                 vec![env::current_dir()?]
-            } else {
-                paths
-            };
+            } else { paths };
             for p in scan_paths {
                 scan::scan_directory(&mut conn, &p)?;
             }
         }
 
-        Commands::Tag { pattern, tag_path } => apply_tag(&conn, &pattern, &tag_path)?,
+        /* ---- tag / attribute / search --------------------------- */
+        Commands::Tag { pattern, tag_path } =>
+            apply_tag(&conn, &pattern, &tag_path)?,
 
         Commands::Attr { action } => match action {
-            cli::AttrCmd::Set { pattern, key, value } => {
-                attr_set(&conn, &pattern, &key, &value)?
-            }
-            cli::AttrCmd::Ls { path } => attr_ls(&conn, &path)?,
+            cli::AttrCmd::Set { pattern, key, value } =>
+                attr_set(&conn, &pattern, &key, &value)?,
+            cli::AttrCmd::Ls { path } =>
+                attr_ls(&conn, &path)?,
         },
 
-        Commands::Search { query, exec } => run_search(&conn, &query, exec)?,
+        Commands::Search { query, exec } =>
+            run_search(&conn, &query, exec)?,
 
+        /* ---- maintenance ---------------------------------------- */
         Commands::Backup => {
-            let path = db::backup(&cfg.db_path)?;
-            println!("Backup created: {}", path.display());
+            let p = db::backup(&cfg.db_path)?;
+            println!("Backup created: {}", p.display());
         }
 
         Commands::Restore { backup_path } => {
@@ -114,14 +125,14 @@ fn main() -> Result<()> {
             info!("Successfully opened restored database.");
         }
 
-        /* passthrough sub-modules that still stub out their logic */
+        /* ---- passthrough sub-modules (some still stubs) ---------- */
         Commands::Link(link_cmd) => cli::link::run(&link_cmd, &mut conn, args.format)?,
         Commands::Coll(coll_cmd) => cli::coll::run(&coll_cmd, &mut conn, args.format)?,
         Commands::View(view_cmd) => cli::view::run(&view_cmd, &mut conn, args.format)?,
         Commands::State(state_cmd) => cli::state::run(&state_cmd, &mut conn, args.format)?,
         Commands::Task(task_cmd) => cli::task::run(&task_cmd, &mut conn, args.format)?,
         Commands::Remind(rm_cmd) => cli::remind::run(&rm_cmd, &mut conn, args.format)?,
-        Commands::Annotate(an_cmd) => cli::annotate::run(&an_cmd, &mut conn, args.format)?,
+        Commands::Annotate(a_cmd) => cli::annotate::run(&a_cmd, &mut conn, args.format)?,
         Commands::Version(v_cmd) => cli::version::run(&v_cmd, &mut conn, args.format)?,
         Commands::Event(e_cmd) => cli::event::run(&e_cmd, &mut conn, args.format)?,
     }
@@ -129,29 +140,24 @@ fn main() -> Result<()> {
     Ok(())
 }
 
-/* ───────────────────────── helpers & sub-routines ───────────────── */
+/* ─────────────────── helpers & sub-routines ─────────────────── */
 
 /* ---------- TAGS ---------- */
 
-/// Apply a hierarchical tag to all files matching the glob pattern.
 fn apply_tag(conn: &rusqlite::Connection, pattern: &str, tag_path: &str) -> Result<()> {
-    // ensure_tag_path returns the deepest-node ID
+    // ensure_tag_path returns ID of deepest node
     let leaf_tag_id = db::ensure_tag_path(conn, tag_path)?;
 
-    // collect that tag and all its ancestors
+    // collect leaf + ancestors
     let mut tag_ids = Vec::new();
     let mut current = Some(leaf_tag_id);
     while let Some(id) = current {
         tag_ids.push(id);
-        current = match conn.query_row(
-            "SELECT parent_id FROM tags WHERE id = ?1",
-            params![id],
+        current = conn.query_row(
+            "SELECT parent_id FROM tags WHERE id=?1",
+            [id],
             |r| r.get::<_, Option<i64>>(0),
-        ) {
-            Ok(parent_id) => parent_id,
-            Err(rusqlite::Error::QueryReturnedNoRows) => None,
-            Err(e) => return Err(e.into()),
-        };
+        )?;
     }
 
     let expanded = shellexpand::tilde(pattern).into_owned();
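The rewritten loop leans on the schema fact that a root tag's `parent_id` is NULL, so `query_row` returns `Ok(None)` and the walk stops without the explicit `QueryReturnedNoRows` arm. A toy model of the same ancestor walk, with the tags table mocked as a map (illustration only, not code from the crate):

```rust
use std::collections::HashMap;

// Toy model of the `tags` table: id -> parent_id (None = root tag).
fn ancestors(tags: &HashMap<i64, Option<i64>>, leaf: i64) -> Vec<i64> {
    let mut ids = Vec::new();
    let mut current = Some(leaf);
    while let Some(id) = current {
        ids.push(id);
        // In the real code this is the `SELECT parent_id FROM tags WHERE id=?1`
        // query; a root's NULL parent is what terminates the loop.
        current = tags.get(&id).copied().flatten();
    }
    ids
}

fn main() {
    // project(1) -> marlin(2) -> docs(3)
    let tags = HashMap::from([(1, None), (2, Some(1)), (3, Some(2))]);
    assert_eq!(ancestors(&tags, 3), vec![3, 2, 1]); // leaf first, root last
}
```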
@@ -159,102 +165,87 @@ fn apply_tag(conn: &rusqlite::Connection, pattern: &str, tag_path: &str) -> Result<()> {
         .with_context(|| format!("Invalid glob pattern `{expanded}`"))?;
     let root = determine_scan_root(&expanded);
 
-    let mut stmt_file = conn.prepare("SELECT id FROM files WHERE path = ?1")?;
+    let mut stmt_file = conn.prepare("SELECT id FROM files WHERE path=?1")?;
     let mut stmt_insert = conn.prepare(
         "INSERT OR IGNORE INTO file_tags(file_id, tag_id) VALUES (?1, ?2)",
     )?;
 
-    let mut count = 0;
+    let mut count = 0usize;
     for entry in WalkDir::new(&root)
         .into_iter()
         .filter_map(Result::ok)
         .filter(|e| e.file_type().is_file())
     {
-        let path_str = entry.path().to_string_lossy();
-        if !pat.matches(&path_str) {
-            continue;
-        }
+        let p = entry.path().to_string_lossy();
+        if !pat.matches(&p) { continue; }
 
-        match stmt_file.query_row(params![path_str.as_ref()], |r| r.get::<_, i64>(0)) {
-            Ok(file_id) => {
+        match stmt_file.query_row([p.as_ref()], |r| r.get::<_, i64>(0)) {
+            Ok(fid) => {
                 let mut newly = false;
                 for &tid in &tag_ids {
-                    if stmt_insert.execute(params![file_id, tid])? > 0 {
+                    if stmt_insert.execute([fid, tid])? > 0 {
                         newly = true;
                     }
                 }
                 if newly {
-                    info!(file = %path_str, tag = tag_path, "tagged");
+                    info!(file=%p, tag=tag_path, "tagged");
                     count += 1;
                 }
             }
-            Err(rusqlite::Error::QueryReturnedNoRows) => {
-                error!(file = %path_str, "not indexed – run `marlin scan` first");
-            }
-            Err(e) => {
-                error!(file = %path_str, error = %e, "could not lookup file ID");
-            }
+            Err(rusqlite::Error::QueryReturnedNoRows) =>
+                error!(file=%p, "not indexed – run `marlin scan` first"),
+            Err(e) =>
+                error!(file=%p, error=%e, "could not lookup file ID"),
         }
     }
 
-    info!(
-        "Applied tag '{}' to {} file(s).",
-        tag_path, count
-    );
+    info!("Applied tag '{}' to {} file(s).", tag_path, count);
     Ok(())
 }
 
 /* ---------- ATTRIBUTES ---------- */
 
-/// Set a key=value attribute on all files matching the glob pattern.
 fn attr_set(conn: &rusqlite::Connection, pattern: &str, key: &str, value: &str) -> Result<()> {
     let expanded = shellexpand::tilde(pattern).into_owned();
     let pat = Pattern::new(&expanded)
         .with_context(|| format!("Invalid glob pattern `{expanded}`"))?;
     let root = determine_scan_root(&expanded);
 
-    let mut stmt_file = conn.prepare("SELECT id FROM files WHERE path = ?1")?;
-    let mut count = 0;
+    let mut stmt_file = conn.prepare("SELECT id FROM files WHERE path=?1")?;
+    let mut count = 0usize;
 
     for entry in WalkDir::new(&root)
         .into_iter()
         .filter_map(Result::ok)
         .filter(|e| e.file_type().is_file())
     {
-        let path_str = entry.path().to_string_lossy();
-        if !pat.matches(&path_str) {
-            continue;
-        }
+        let p = entry.path().to_string_lossy();
+        if !pat.matches(&p) { continue; }
 
-        match stmt_file.query_row(params![path_str.as_ref()], |r| r.get::<_, i64>(0)) {
-            Ok(file_id) => {
-                db::upsert_attr(conn, file_id, key, value)?;
-                info!(file = %path_str, key, value, "attr set");
+        match stmt_file.query_row([p.as_ref()], |r| r.get::<_, i64>(0)) {
+            Ok(fid) => {
+                db::upsert_attr(conn, fid, key, value)?;
+                info!(file=%p, key, value, "attr set");
                 count += 1;
             }
-            Err(rusqlite::Error::QueryReturnedNoRows) => {
-                error!(file = %path_str, "not indexed – run `marlin scan` first");
-            }
-            Err(e) => {
-                error!(file = %path_str, error = %e, "could not lookup file ID");
-            }
+            Err(rusqlite::Error::QueryReturnedNoRows) =>
+                error!(file=%p, "not indexed – run `marlin scan` first"),
+            Err(e) =>
+                error!(file=%p, error=%e, "could not lookup file ID"),
        }
    }
 
-    info!(
-        "Attribute '{}={}' set on {} file(s).",
-        key, value, count
-    );
+    info!("Attribute '{}={}' set on {} file(s).", key, value, count);
     Ok(())
 }
 
-/// List attributes for a given file path.
 fn attr_ls(conn: &rusqlite::Connection, path: &Path) -> Result<()> {
-    let file_id = db::file_id(conn, &path.to_string_lossy())?;
-    let mut stmt =
-        conn.prepare("SELECT key, value FROM attributes WHERE file_id = ?1 ORDER BY key")?;
+    let fid = db::file_id(conn, &path.to_string_lossy())?;
+    let mut stmt = conn.prepare(
+        "SELECT key, value FROM attributes WHERE file_id=?1 ORDER BY key"
+    )?;
     for row in stmt
-        .query_map([file_id], |r| Ok((r.get::<_, String>(0)?, r.get::<_, String>(1)?)))?
+        .query_map([fid], |r| Ok((r.get::<_, String>(0)?, r.get::<_, String>(1)?)))?
     {
         let (k, v) = row?;
         println!("{k} = {v}");
@@ -264,11 +255,8 @@ fn attr_ls(conn: &rusqlite::Connection, path: &Path) -> Result<()> {
 
 /* ---------- SEARCH ---------- */
 
-/// Run an FTS5 search, optionally piping each hit through `exec`.
-/// Falls back to a simple substring scan (path + ≤64 kB file contents)
-/// when the FTS index yields no rows.
 fn run_search(conn: &rusqlite::Connection, raw_query: &str, exec: Option<String>) -> Result<()> {
-    // Build the FTS MATCH expression
+    /* ── build FTS expression -------------------------------- */
     let mut parts = Vec::new();
     let toks = shlex::split(raw_query).unwrap_or_else(|| vec![raw_query.to_string()]);
     for tok in toks {
@@ -276,9 +264,7 @@ fn run_search(conn: &rusqlite::Connection, raw_query: &str, exec: Option<String>
             parts.push(tok);
         } else if let Some(tag) = tok.strip_prefix("tag:") {
             for (i, seg) in tag.split('/').filter(|s| !s.is_empty()).enumerate() {
-                if i > 0 {
-                    parts.push("AND".into());
-                }
+                if i > 0 { parts.push("AND".into()); }
                 parts.push(format!("tags_text:{}", escape_fts(seg)));
             }
         } else if let Some(attr) = tok.strip_prefix("attr:") {
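How a `tag:` token expands is easiest to see with a worked case. A minimal standalone re-statement of the expansion rule above (illustration only; the real code additionally applies the `escape_fts` quoting shown later in the diff):

```rust
// Expand one `tag:` token into FTS terms, mirroring the loop above.
fn expand_tag_token(tag: &str) -> Vec<String> {
    let mut parts = Vec::new();
    for (i, seg) in tag.split('/').filter(|s| !s.is_empty()).enumerate() {
        if i > 0 { parts.push("AND".into()); } // hierarchical segments are AND-ed
        parts.push(format!("tags_text:{seg}"));
    }
    parts
}

fn main() {
    assert_eq!(
        expand_tag_token("project/marlin"),
        vec!["tags_text:project", "AND", "tags_text:marlin"]
    );
}
```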
@@ -298,7 +284,7 @@ fn run_search(conn: &rusqlite::Connection, raw_query: &str, exec: Option<String>
     let fts_expr = parts.join(" ");
     debug!("FTS MATCH expression: {fts_expr}");
 
-    // ---------- primary FTS query ----------
+    /* ── primary FTS query ---------------------------------- */
     let mut stmt = conn.prepare(
         r#"
         SELECT f.path
@@ -309,55 +295,49 @@ fn run_search(conn: &rusqlite::Connection, raw_query: &str, exec: Option<String>
         "#,
     )?;
     let mut hits: Vec<String> = stmt
-        .query_map(params![fts_expr], |r| r.get::<_, String>(0))?
+        .query_map([&fts_expr], |r| r.get::<_, String>(0))?
         .filter_map(Result::ok)
         .collect();
 
-    // ---------- graceful fallback ----------
+    /* ── graceful fallback (substring scan) ----------------- */
     if hits.is_empty() && !raw_query.contains(':') {
         hits = naive_substring_search(conn, raw_query)?;
     }
 
-    // ---------- output / exec ----------
+    /* ── output / exec -------------------------------------- */
     if let Some(cmd_tpl) = exec {
         run_exec(&hits, &cmd_tpl)?;
     } else {
         if hits.is_empty() {
             eprintln!(
-                "No matches for query: `{raw_query}` (FTS expression: `{fts_expr}`)"
+                "No matches for query: `{raw_query}` (FTS expr: `{fts_expr}`)"
             );
         } else {
-            for p in hits {
-                println!("{p}");
-            }
+            for p in hits { println!("{p}"); }
         }
     }
 
     Ok(())
 }
 
-/// Simple, case-insensitive substring scan over paths and (small) file bodies.
+/// Fallback: case-insensitive substring scan over paths *and* small file bodies.
 fn naive_substring_search(conn: &rusqlite::Connection, term: &str) -> Result<Vec<String>> {
-    let term_lc = term.to_lowercase();
+    let needle = term.to_lowercase();
 
     let mut stmt = conn.prepare("SELECT path FROM files")?;
     let rows = stmt.query_map([], |r| r.get::<_, String>(0))?;
 
     let mut out = Vec::new();
     for p in rows {
         let p = p?;
-        if p.to_lowercase().contains(&term_lc) {
+        if p.to_lowercase().contains(&needle) {
             out.push(p.clone());
             continue;
         }
-        // Only inspect small files to stay fast
+        // Only scan files ≤ 64 kB
         if let Ok(meta) = fs::metadata(&p) {
-            if meta.len() > 64_000 {
-                continue;
-            }
+            if meta.len() > 65_536 { continue; }
         }
-        if let Ok(content) = fs::read_to_string(&p) {
-            if content.to_lowercase().contains(&term_lc) {
+        if let Ok(body) = fs::read_to_string(&p) {
+            if body.to_lowercase().contains(&needle) {
                 out.push(p);
             }
         }
@@ -365,17 +345,18 @@ fn naive_substring_search(conn: &rusqlite::Connection, term: &str) -> Result<Vec<String>> {
     Ok(out)
 }
 
-/// Helper: run an external command template on every hit.
+/// Run external command template on every hit (`{}` placeholder supported).
 fn run_exec(paths: &[String], cmd_tpl: &str) -> Result<()> {
     let mut ran_without_placeholder = false;
 
+    // optimisation: if no hits and no placeholder, run once
     if paths.is_empty() && !cmd_tpl.contains("{}") {
         if let Some(mut parts) = shlex::split(cmd_tpl) {
             if !parts.is_empty() {
                 let prog = parts.remove(0);
-                let status = Command::new(&prog).args(&parts).status()?;
+                let status = Command::new(&prog).args(parts).status()?;
                 if !status.success() {
-                    error!(command = %cmd_tpl, code = ?status.code(), "command failed");
+                    error!(command=%cmd_tpl, code=?status.code(), "command failed");
                 }
             }
         }
@@ -391,13 +372,11 @@ fn run_exec(paths: &[String], cmd_tpl: &str) -> Result<()> {
             format!("{cmd_tpl} {quoted}")
         };
         if let Some(mut parts) = shlex::split(&final_cmd) {
-            if parts.is_empty() {
-                continue;
-            }
+            if parts.is_empty() { continue; }
             let prog = parts.remove(0);
-            let status = Command::new(&prog).args(&parts).status()?;
+            let status = Command::new(&prog).args(parts).status()?;
             if !status.success() {
-                error!(file = %p, command = %final_cmd, code = ?status.code(), "command failed");
+                error!(file=%p, command=%final_cmd, code=?status.code(), "command failed");
             }
         }
     }
@@ -409,33 +388,8 @@ fn run_exec(paths: &[String], cmd_tpl: &str) -> Result<()> {
 
 fn escape_fts(term: &str) -> String {
     if term.contains(|c: char| c.is_whitespace() || "-:()\"".contains(c))
-        || ["AND", "OR", "NOT", "NEAR"]
-            .contains(&term.to_uppercase().as_str())
+        || ["AND", "OR", "NOT", "NEAR"].contains(&term.to_uppercase().as_str())
     {
         format!("\"{}\"", term.replace('"', "\"\""))
-    } else {
-        term.to_string()
-    }
-}
-
-/// Determine a filesystem root to limit recursive walking.
-fn determine_scan_root(pattern: &str) -> PathBuf {
-    let first_wild = pattern
-        .find(|c| matches!(c, '*' | '?' | '['))
-        .unwrap_or(pattern.len());
-    let mut root = PathBuf::from(&pattern[..first_wild]);
-
-    while root
-        .as_os_str()
-        .to_string_lossy()
-        .contains(|c| matches!(c, '*' | '?' | '['))
-    {
-        root = root.parent().map(Path::to_path_buf).unwrap_or_default();
-    }
-
-    if root.as_os_str().is_empty() {
-        PathBuf::from(".")
-    } else {
-        root
-    }
-}
+    } else { term.to_string() }
+}
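The quoting rule is easiest to see with a few concrete inputs. A minimal sketch restating `escape_fts` with worked cases (a standalone re-statement for illustration, not taken verbatim from the crate):

```rust
// Re-statement of the quoting rule above, for illustration only.
fn escape_fts(term: &str) -> String {
    if term.contains(|c: char| c.is_whitespace() || "-:()\"".contains(c))
        || ["AND", "OR", "NOT", "NEAR"].contains(&term.to_uppercase().as_str())
    {
        // Wrap in double quotes; inner quotes are doubled, per FTS5 string syntax.
        format!("\"{}\"", term.replace('"', "\"\""))
    } else {
        term.to_string()
    }
}

fn main() {
    assert_eq!(escape_fts("rust"), "rust");           // bare token passes through
    assert_eq!(escape_fts("foo bar"), "\"foo bar\""); // whitespace forces quoting
    assert_eq!(escape_fts("and"), "\"and\"");         // reserved word, any case
    assert_eq!(escape_fts("say \"hi\""), "\"say \"\"hi\"\"\""); // quotes doubled
}
```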
@@ -1,3 +1,4 @@
+//! tests e2e.rs
 //! End-to-end “happy path” smoke-tests for the `marlin` binary.
 //!
 //! Run with `cargo test --test e2e` (CI does) or `cargo test`.
@@ -1,3 +1,4 @@
+//! tests neg.rs
 //! Negative-path integration tests (“should fail / warn”).
 
 use predicates::str;
@@ -1,3 +1,4 @@
+//! tests pos.rs
 //! Positive-path integration checks for every sub-command
 //! that already has real logic behind it.
 
@@ -60,7 +60,7 @@ If you wire **“cargo test --all”** into CI (GitHub Actions, GitLab, etc.), p
 ### One-liner helper (copy/paste)
 
 ```bash
-git pull && cargo build --release &&
+cargo build --release &&
 sudo install -Dm755 target/release/marlin /usr/local/bin/marlin &&
 cargo test --all -- --nocapture
 ```
docs/roadmap.md (new file, 74 lines)
@@ -0,0 +1,74 @@
+# Marlin ― Delivery Road‑map **v3**
+
+*Engineering‑ready version — updated 2025‑05‑17*
+
+> **Legend**
+> **△** = engineering artefact (spec / ADR / perf target) **✦** = user-visible deliverable
+
+---
+
+## 0 · Methodology primer (what “Done” means)
+
+| Theme | Project rule-of-thumb |
+| -------------- | -------------------------------------------------------------------------------------------------------------------------------- |
+| **Branching** | Trunk-based. Feature branches → PR → 2 reviews → squash-merge. |
+| **Spec first** | Every epic starts with a **Design Proposal (DP-xxx)** in `/docs/adr/`. Include schema diffs, example CLI session, perf budget. |
+| **Tests** | Unit + integration coverage ≥ 85 % on lines **touched in the sprint** (checked by Tarpaulin). |
+| **Perf gate** | Cold start P95 ≤ 3 s on 100 k files **unless overridden in DP**. Regressions fail CI. |
+| **Docs** | CLI flags & examples land in `README.md` **same PR** that ships the code. |
+| **Demo** | Closing each epic produces a 2-min asciinema or gif in `docs/demos/`. |
+
+---
+
+## 1 · Bird’s‑eye table (now includes engineering columns)
+
+| Phase / Sprint | Timeline | Focus & Rationale | ✦ Key UX Deliverables | △ Engineering artefacts / tasks | Definition of Done |
+| --------------------------------------------- | -------- | ---------------------------------------- | -------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- |
+| **Epic 1 — Scale & Reliability** | 2025-Q2 | Stay fast @ 100 k files | • `scan --dirty` (re-index touched rows only) | • DP-002 Dirty-flag design + FTS rebuild cadence<br>• Hyperfine benchmark script committed | Dirty scan vs full ≤ 15 % runtime on 100 k corpus; benchmark job passes |
+| **Epic 2 — Live Mode & Self‑Pruning Backups** | 2025-Q2 | “Just works” indexing, DB never explodes | • `marlin watch <dir>` (notify/FSEvents)<br>• `backup --prune N` & auto-prune | • DP-003 file-watcher life-cycle & debouncing<br>• Integration test with inotify-sim <br>• Cron-style GitHub job for nightly prune | 8 h stress-watch alters 10 k files < 1 % misses; backup dir ≤ N |
+| **Phase 3 — Content FTS + Annotations** | 2025-Q3 | Search inside files, leave notes | • Grep-style snippet output (`-C3`)<br>• `marlin annotate add/list` | • DP-004 content-blob strategy (inline vs ext-table)<br>• Syntax-highlight via `syntect` PoC<br>• New FTS triggers unit-tested | Indexes 1 GB corpus in ≤ 30 min; snippet CLI passes golden-file tests |
+| **Phase 4 — Versioning & Deduplication** | 2025-Q3 | Historic diffs, detect dupes | • `scan --rehash` (SHA-256)<br>• `version diff <file>` | • DP-005 hash column + Bloom-de-dupe<br>• Binary diff adapter research | Diff on 10 MB file ≤ 500 ms; dupes listed via CLI |
+| **Phase 5 — Tag Aliases & Semantic Booster** | 2025-Q3 | Tame tag sprawl, start AI hints | • `tag alias add/ls/rm`<br>• `tag suggest`, `summary` | • DP-006 embeddings size & model choice<br>• Vector store schema + k-NN index bench | 95 % of “foo/bar\~foo” alias look-ups resolve in one hop; suggest CLI returns ≤ 150 ms |
+| **Phase 6 — Search DSL v2 & Smart Views** | 2025-Q4 | Pro-grade query language | • New `nom` grammar: AND/OR, parentheses, ranges | • DP-007 BNF + 30 acceptance strings<br>• Lexer fuzz-tests with `cargo-fuzz` | Old queries keep working (migration shim); 0 crashes in fuzz run ≥ 1 M cases |
+| **Phase 7 — Structured Workflows** | 2025-Q4 | Tasks, state, reminders, templates | • `state set/transitions add/log`<br>• `task scan/list`<br>• **NEW:** `template apply` | • DP-008 Workflow tables & validation<br>• Sample YAML template spec + CLI expansion tests | Create template, apply to 20 files → all attrs/link rows present; state graph denies illegal transitions |
+| **Phase 8 — Lightweight Integrations** | 2026-Q1 | First “shell” GUIs | • VS Code side-bar (read-only)<br>• **TUI v1** (tag tree ▸ file list ▸ preview) | • DP-009 TUI key-map & redraw budget<br>• Crate split `marlin_core`, `marlin_tui` | TUI binary ≤ 2.0 MB; 10 k row scroll ≤ 4 ms redraw |
+| **Phase 9 — Dolphin Sidebar (MVP)** | 2026-Q1 | Peek metadata in KDE file-manager | • Qt-plugin showing tags, attrs, links | • DP-010 DB/IP bridge (D‑Bus vs UNIX socket)<br>• CMake packaging script | Sidebar opens in ≤ 150 ms; passes KDE lint |
+| **Phase 10 — Full GUI & Multi-device Sync** | 2026-Q2 | Edit metadata visually, sync option | • Electron/Qt hybrid explorer UI<br>• Pick & integrate sync backend | • DP-011 sync back-end trade-study<br>• UI e2e tests in Playwright | Round-trip CRUD between two nodes in < 2 s; 25 GUI tests green |
+
+---
+
+### 2 · Feature cross-matrix (quick look-ups)
+
+| Capability | Sprint / Phase | CLI flag or GUI element | Linked DP |
+| ------------------------------------- | -------------- | ---------------------------------- | --------- |
+| Relationship **templates** | P7 | `template new`, `template apply` | DP-008 |
+| Positive / negative filter combinator | P6 | DSL `+tag:foo -tag:bar date>=2025` | DP-007 |
+| Dirty-scan optimisation | E1 | `scan --dirty` | DP-002 |
+| Watch-mode | E2 | `marlin watch .` | DP-003 |
+| Grep snippets | P3 | `search -C3 "foo"` | DP-004 |
+| Hash / dedupe | P4 | `scan --rehash` | DP-005 |
+
+---
+
+## 3 · Milestone acceptance checklist
+
+Before a milestone is declared “shipped”:
+
+* [ ] **Spec** merged (DP-xxx) with schema diff & example ASCII-cast
+* [ ] **Unit & integration tests** ≥ 85 % coverage on changed lines
+* [ ] **Perf guard-rail** script passes on CI matrix (Ubuntu 22, macOS 14)
+* [ ] **Docs** — CLI man-page, README table row, roadmap ticked
+* [ ] **Demo** uploaded to `docs/demos/` and linked in release notes
+* [ ] **Release tag** pushed; Cargo binary on GitHub Releases
+
+---
+
+### 4 · Next immediate actions
+
+1. **Write DP-001 (Schema v1.1)** — owner @alice, due 21 May
+2. **Set up Tarpaulin & Hyperfine jobs** — @bob, due 23 May
+3. **Spike dirty-flag logic** — @carol 2 days time-box, outcome in DP-002
+
+---
+
+> *This roadmap now contains both product-level “what” and engineering-level “how/when/prove it”. It should allow a new contributor to jump in, pick the matching DP, and know exactly the bar they must clear for their code to merge.*
libmarlin/Cargo.toml (new file, 21 lines)
@@ -0,0 +1,21 @@
+[package]
+name = "libmarlin"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[dependencies]
+anyhow = "1"
+chrono = "0.4"
+directories = "5"
+glob = "0.3"
+rusqlite = { version = "0.31", features = ["bundled", "backup"] }
+tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["fmt", "env-filter"] }
+walkdir = "2.5"
+shlex = "1.3"
+shellexpand = "3.1"
+serde_json = { version = "1", optional = true }
+
+[features]
+json = ["serde_json"]
libmarlin/src/lib.rs (new file, 122 lines)
@@ -0,0 +1,122 @@
+//! libmarlin – public API surface for the Marlin core.
+//!
+//! Down-stream crates (`cli-bin`, `tui-bin`, tests, plugins) should depend
+//! *only* on the helpers re-exported here, never on internal modules
+//! directly. That gives us room to refactor internals without breaking
+//! callers.
+
+#![deny(warnings)]
+
+pub mod config;  // moved as-is
+pub mod db;      // moved as-is
+pub mod logging; // expose the logging init helper
+pub mod scan;    // moved as-is
+pub mod utils;   // hosts determine_scan_root() & misc helpers
+
+use anyhow::{Context, Result};
+use rusqlite::Connection;
+use std::path::Path;
+use walkdir::WalkDir;
+
+/// Primary façade – open a workspace then call helper methods.
+///
+/// Most methods simply wrap what the CLI used to do directly; more will be
+/// filled in sprint-by-sprint.
+pub struct Marlin {
+    #[allow(dead_code)]
+    cfg: config::Config,
+    conn: Connection,
+}
+
+impl Marlin {
+    /// Load configuration from env / workspace and open (or create) the DB.
+    pub fn open_default() -> Result<Self> {
+        let cfg = config::Config::load()?;
+        let conn = db::open(&cfg.db_path)?;
+        Ok(Self { cfg, conn })
+    }
+
+    /// Open an explicit DB path – handy for tests or headless tools.
+    pub fn open_at<P: AsRef<Path>>(path: P) -> Result<Self> {
+        let cfg = config::Config { db_path: path.as_ref().to_path_buf() };
+        let conn = db::open(&cfg.db_path)?;
+        Ok(Self { cfg, conn })
+    }
+
+    /// Recursively index one or more directories.
+    pub fn scan<P: AsRef<Path>>(&mut self, paths: &[P]) -> Result<usize> {
+        let mut total = 0usize;
+        for p in paths {
+            total += scan::scan_directory(&mut self.conn, p.as_ref())?;
+        }
+        Ok(total)
+    }
+
+    /// Attach a hierarchical tag (`foo/bar`) to every file that matches the
+    /// glob pattern. Returns the number of files that actually got updated.
+    pub fn tag(&mut self, pattern: &str, tag_path: &str) -> Result<usize> {
+        use glob::Pattern;
+
+        // 1) ensure tag hierarchy exists
+        let leaf_tag_id = db::ensure_tag_path(&self.conn, tag_path)?;
+
+        // 2) collect leaf + ancestors
+        let mut tag_ids = Vec::new();
+        let mut current = Some(leaf_tag_id);
+        while let Some(id) = current {
+            tag_ids.push(id);
+            current = self.conn.query_row(
+                "SELECT parent_id FROM tags WHERE id=?1",
+                [id],
+                |r| r.get::<_, Option<i64>>(0),
+            )?;
+        }
+
+        // 3) walk the file tree and upsert `file_tags`
+        let expanded = shellexpand::tilde(pattern).into_owned();
+        let pat = Pattern::new(&expanded)
+            .with_context(|| format!("Invalid glob pattern `{expanded}`"))?;
+        let root = utils::determine_scan_root(&expanded);
+
+        let mut stmt_file = self.conn.prepare("SELECT id FROM files WHERE path=?1")?;
+        let mut stmt_insert = self.conn.prepare(
+            "INSERT OR IGNORE INTO file_tags(file_id, tag_id) VALUES (?1, ?2)",
+        )?;
+
+        let mut changed = 0usize;
+        for entry in WalkDir::new(&root)
+            .into_iter()
+            .filter_map(Result::ok)
+            .filter(|e| e.file_type().is_file())
+        {
+            let p = entry.path().to_string_lossy();
+            if !pat.matches(&p) { continue; }
+
+            match stmt_file.query_row([p.as_ref()], |r| r.get::<_, i64>(0)) {
+                Ok(fid) => {
+                    let mut newly = false;
+                    for &tid in &tag_ids {
+                        if stmt_insert.execute([fid, tid])? > 0 { newly = true; }
+                    }
+                    if newly { changed += 1; }
+                }
+                Err(_) => { /* ignore non-indexed files */ }
+            }
+        }
+
+        Ok(changed)
+    }
+
+    /// FTS5 search → list of matching paths.
+    pub fn search(&self, query: &str) -> Result<Vec<String>> {
+        let mut stmt = self.conn.prepare(
+            "SELECT path FROM files_fts WHERE files_fts MATCH ?1 ORDER BY rank",
+        )?;
+        let rows = stmt.query_map([query], |r| r.get::<_, String>(0))?
+            .collect::<Result<Vec<_>, _>>()?;
+        Ok(rows)
+    }
+
+    /// Borrow the underlying SQLite connection (read-only).
+    pub fn conn(&self) -> &Connection { &self.conn }
+}
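The façade is easiest to grasp from the caller's side. A minimal sketch of how a downstream tool might drive it, using only the methods defined above (the DB path, directory, tag, and query strings are invented for illustration):

```rust
use anyhow::Result;
use libmarlin::Marlin;

fn main() -> Result<()> {
    // Open (or create) a DB at an explicit path – the pattern tests use.
    let mut marlin = Marlin::open_at("/tmp/marlin-demo.db")?;

    // Index a directory tree, then tag every matching file.
    let indexed = marlin.scan(&["./notes"])?;
    let tagged = marlin.tag("./notes/**/*.md", "project/marlin")?;
    println!("indexed {indexed} files, tagged {tagged}");

    // FTS5 query over the index.
    for path in marlin.search("tags_text:marlin")? {
        println!("{path}");
    }
    Ok(())
}
```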
libmarlin/src/utils.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
+//! Misc shared helpers.
+
+use std::path::PathBuf;
+
+/// Determine a filesystem root to limit recursive walking on glob scans.
+pub fn determine_scan_root(pattern: &str) -> PathBuf {
+    let first_wild = pattern
+        .find(|c| matches!(c, '*' | '?' | '['))
+        .unwrap_or(pattern.len());
+    let mut root = PathBuf::from(&pattern[..first_wild]);
+
+    while root
+        .as_os_str()
+        .to_string_lossy()
+        .contains(|c| matches!(c, '*' | '?' | '['))
+    {
+        root = root.parent().map(|p| p.to_path_buf()).unwrap_or_default();
+    }
+
+    if root.as_os_str().is_empty() {
+        PathBuf::from(".")
+    } else {
+        root
+    }
+}
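A few worked inputs make the truncation rule concrete. The example patterns below are invented; the expected outputs follow directly from the function above (assumed importable as `libmarlin::utils::determine_scan_root`):

```rust
use std::path::PathBuf;
use libmarlin::utils::determine_scan_root;

fn main() {
    // No wildcard: the whole pattern is kept as the walk root.
    assert_eq!(determine_scan_root("notes/todo.md"), PathBuf::from("notes/todo.md"));
    // Wildcard mid-pattern: cut at the first `*`, leaving the parent directory.
    assert_eq!(determine_scan_root("notes/**/*.md"), PathBuf::from("notes"));
    // Wildcard in the first component: fall back to the current directory.
    assert_eq!(determine_scan_root("*.md"), PathBuf::from("."));
}
```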
roadmap.md (75 lines, deleted — moved to docs/roadmap.md)
@@ -1,75 +0,0 @@
-# Marlin ― Delivery Road-map **v3**
-
-*Engineering-ready version — updated 2025-05-17*
-
-> **Legend**
-> **△** = engineering artefact (spec / ADR / perf target) **✦** = user-visible deliverable
-
----
-
-## 0 · Methodology primer (what “Done” means)
-
-| Theme | Project rule-of-thumb |
-| -------------- | -------------------------------------------------------------------------------------------------------------------------------- |
-| **Branching** | Trunk-based. Feature branches → PR → 2 reviews → squash-merge. |
-| **Spec first** | Every epic starts with a **Design Proposal (DP-xxx)** in `/docs/adr/`. Include schema diffs, example CLI session, perf budget. |
-| **Tests** | Unit + integration coverage ≥ 85 % on lines **touched in the sprint** (checked by Tarpaulin). |
-| **Perf gate** | Cold start P95 ≤ 3 s on 100 k files **unless overridden in DP**. Regressions fail CI. |
-| **Docs** | CLI flags & examples land in `README.md` **same PR** that ships the code. |
-| **Demo** | Closing each epic produces a 2-min asciinema or gif in `docs/demos/`. |
-
----
-
-## 1 · Bird’s-eye table (now includes engineering columns)
-
-| Phase / Sprint | Timeline | Focus & Rationale | ✦ Key UX Deliverables | △ Engineering artefacts / tasks | Definition of Done | | | |
-| --------------------------------------------- | ----------------------------- | ----------------------------------------------------- | ------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------ | -------------------------------------------------------------------------------------------------------- |
-| **Sprint α — Bedrock & Metadata Domains** | **2025-Q2<br>(now → 06 Jun)** | Lock schema, smoking-fast CI, first metadata objects. | • CLI stubs: `marlin link / coll / view`<br>• `marlin demo` interactive tour | • DP-001 Schema v1.1 (ER + migration scripts)<br>• Unit tests (`escape_fts`, `determine_scan_root`)<br>• GitHub Action for SQL dry-run | 100 % migrations green on CI; demo command prints green tick | | | |
-| **Epic 1 — Scale & Reliability** | 2025-Q2 | Stay fast @ 100 k files | • `scan --dirty` (re-index touched rows only) | • DP-002 Dirty-flag design + FTS rebuild cadence<br>• Hyperfine benchmark script committed | Dirty scan vs full ≤ 15 % runtime on 100 k corpus; benchmark job passes | | | |
-| **Epic 2 — Live Mode & Self-Pruning Backups** | 2025-Q2 | “Just works” indexing, DB never explodes | • `marlin watch <dir>` (notify/FSEvents)<br>• `backup --prune N` & auto-prune | • DP-003 file-watcher life-cycle & debouncing<br>• Integration test with inotify-sim <br>• Cron-style GitHub job for nightly prune | 8 h stress-watch alters 10 k files < 1 % misses; backup dir ≤ N | | | |
-| **Phase 3 — Content FTS + Annotations** | 2025-Q3 | Search inside files, leave notes | • Grep-style snippet output (`-C3`)<br>• \`marlin annotate add | list\` | • DP-004 content-blob strategy (inline vs ext-table)<br>• Syntax-highlight via `syntect` PoC<br>• New FTS triggers unit-tested | Indexes 1 GB corpus in ≤ 30 min; snippet CLI passes golden-file tests | | |
-| **Phase 4 — Versioning & Deduplication** | 2025-Q3 | Historic diffs, detect dupes | • `scan --rehash` (SHA-256)<br>• `version diff <file>` | • DP-005 hash column + Bloom-de-dupe<br>• Binary diff adapter research | Diff on 10 MB file ≤ 500 ms; dupes listed via CLI | | | |
-| **Phase 5 — Tag Aliases & Semantic Booster** | 2025-Q3 | Tame tag sprawl, start AI hints | • \`tag alias add | ls | rm`<br>• `tag suggest`, `summary\` | • DP-006 embeddings size & model choice<br>• Vector store schema + k-NN index bench | 95 % of “foo/bar\~foo” alias look-ups resolve in one hop; suggest CLI returns ≤ 150 ms | |
-| **Phase 6 — Search DSL v2 & Smart Views** | 2025-Q4 | Pro-grade query language | • New `nom` grammar: AND/OR, parentheses, ranges | • DP-007 BNF + 30 acceptance strings<br>• Lexer fuzz-tests with `cargo-fuzz` | Old queries keep working (migration shim); 0 crashes in fuzz run ≥ 1 M cases | | | |
-| **Phase 7 — Structured Workflows** | 2025-Q4 | Tasks, state, reminders, templates | • \`state set | transitions add | log`<br>• `task scan | list`<br>• **NEW:** `template apply\` for relationship templates | • DP-008 Workflow tables & validation<br>• Sample YAML template spec + CLI expansion tests | Create template, apply to 20 files → all attrs/link rows present; state graph denies illegal transitions |
-| **Phase 8 — Lightweight Integrations** | 2026-Q1 | First “shell” GUIs | • VS Code side-bar (read-only)<br>• **TUI v1** (tag tree ▸ file list ▸ preview) | • DP-009 TUI key-map & redraw budget<br>• Crate split `marlin_core`, `marlin_tui` | TUI binary ≤ 2.0 MB; 10 k row scroll ≤ 4 ms redraw | | | |
-| **Phase 9 — Dolphin Sidebar (MVP)** | 2026-Q1 | Peek metadata in KDE file-manager | • Qt-plugin showing tags, attrs, links | • DP-010 DB/IP bridge (D-Bus vs UNIX socket)<br>• CMake packaging script | Sidebar opens in ≤ 150 ms; passes KDE lint | | | |
-| **Phase 10 — Full GUI & Multi-device Sync** | 2026-Q2 | Edit metadata visually, sync option | • Electron/Qt hybrid explorer UI<br>• Pick & integrate sync backend | • DP-011 sync back-end trade-study<br>• UI e2e tests in Playwright | Round-trip CRUD between two nodes in < 2 s; 25 GUI tests green | | | |
-
----
-
-### 2 · Feature cross-matrix (quick look-ups)
-
-| Capability | Sprint / Phase | CLI flag or GUI element | Linked DP |
-| ------------------------------------- | -------------- | ---------------------------------- | --------- |
-| Relationship **templates** | P7 | `template new`, `template apply` | DP-008 |
-| Positive / negative filter combinator | P6 | DSL `+tag:foo -tag:bar date>=2025` | DP-007 |
-| Dirty-scan optimisation | E1 | `scan --dirty` | DP-002 |
-| Watch-mode | E2 | `marlin watch .` | DP-003 |
-| Grep snippets | P3 | `search -C3 "foo"` | DP-004 |
-| Hash / dedupe | P4 | `scan --rehash` | DP-005 |
-
----
-
-## 3 · Milestone acceptance checklist
-
-Before a milestone is declared “shipped”:
-
-* [ ] **Spec** merged (DP-xxx) with schema diff & example ASCII-cast
-* [ ] **Unit & integration tests** ≥ 85 % coverage on changed lines
-* [ ] **Perf guard-rail** script passes on CI matrix (Ubuntu 22, macOS 14)
-* [ ] **Docs** — CLI man-page, README table row, roadmap ticked
-* [ ] **Demo** uploaded to `docs/demos/` and linked in release notes
-* [ ] **Release tag** pushed; Cargo binary on GitHub Releases
-
----
-
-### 4 · Next immediate actions
-
-1. **Write DP-001 (Schema v1.1)** — owner @alice, due 21 May
-2. **Set up Tarpaulin & Hyperfine jobs** — @bob, due 23 May
-3. **Spike dirty-flag logic** — @carol 2 days time-box, outcome in DP-002
-
----
-
-> *This roadmap now contains both product-level “what” and engineering-level “how/when/prove it”. It should allow a new contributor to jump in, pick the matching DP, and know exactly the bar they must clear for their code to merge.*
@@ -1,240 +0,0 @@
|
|||||||
// Test script to validate the hierarchical-tag FTS fix.
// It demonstrates how the fix works with a simple test case.

use rusqlite::Connection;
use std::fs;
use std::path::Path;
use anyhow::Result;

fn main() -> Result<()> {
    // Create a test database in a temporary location.
    let db_path = Path::new("/tmp/marlin_test.db");
    if db_path.exists() {
        fs::remove_file(db_path)?;
    }

    println!("Creating test database at {:?}", db_path);

    // Initialize the database with our schema and migrations.
    let conn = Connection::open(db_path)?;

    // Apply the schema (a simplified version of what's in the migrations).
    println!("Applying schema...");
    conn.execute_batch(
        "PRAGMA foreign_keys = ON;
         PRAGMA journal_mode = WAL;

         CREATE TABLE files (
             id    INTEGER PRIMARY KEY,
             path  TEXT NOT NULL UNIQUE,
             size  INTEGER,
             mtime INTEGER,
             hash  TEXT
         );

         CREATE TABLE tags (
             id           INTEGER PRIMARY KEY,
             name         TEXT NOT NULL,
             parent_id    INTEGER REFERENCES tags(id) ON DELETE CASCADE,
             canonical_id INTEGER REFERENCES tags(id) ON DELETE SET NULL,
             UNIQUE(name, parent_id)
         );

         CREATE TABLE file_tags (
             file_id INTEGER NOT NULL REFERENCES files(id) ON DELETE CASCADE,
             tag_id  INTEGER NOT NULL REFERENCES tags(id) ON DELETE CASCADE,
             PRIMARY KEY(file_id, tag_id)
         );

         CREATE TABLE attributes (
             id      INTEGER PRIMARY KEY,
             file_id INTEGER NOT NULL REFERENCES files(id) ON DELETE CASCADE,
             key     TEXT NOT NULL,
             value   TEXT,
             UNIQUE(file_id, key)
         );

         CREATE VIRTUAL TABLE files_fts
         USING fts5(
             path,
             tags_text,
             attrs_text,
             content='',
             tokenize=\"unicode61 remove_diacritics 2\"
         );",
    )?;

    // Apply our fixed triggers. The recursive CTE walks from the root tags
    // down the hierarchy so every file is indexed under its full tag path
    // (e.g. 'project/md'), not just the leaf name.
    println!("Applying fixed FTS triggers...");
    conn.execute_batch(
        "CREATE TRIGGER files_fts_ai_file
         AFTER INSERT ON files
         BEGIN
             INSERT INTO files_fts(rowid, path, tags_text, attrs_text)
             VALUES (
                 NEW.id,
                 NEW.path,
                 (SELECT IFNULL(GROUP_CONCAT(tag_path, ' '), '')
                  FROM (
                      WITH RECURSIVE tag_tree(id, name, parent_id, path) AS (
                          SELECT t.id, t.name, t.parent_id, t.name
                          FROM tags t
                          WHERE t.parent_id IS NULL

                          UNION ALL

                          SELECT t.id, t.name, t.parent_id, tt.path || '/' || t.name
                          FROM tags t
                          JOIN tag_tree tt ON t.parent_id = tt.id
                      )
                      SELECT DISTINCT tag_tree.path AS tag_path
                      FROM file_tags ft
                      JOIN tag_tree ON ft.tag_id = tag_tree.id
                      WHERE ft.file_id = NEW.id

                      UNION

                      SELECT t.name AS tag_path
                      FROM file_tags ft
                      JOIN tags t ON ft.tag_id = t.id
                      WHERE ft.file_id = NEW.id AND t.parent_id IS NULL
                  )),
                 (SELECT IFNULL(GROUP_CONCAT(a.key || '=' || a.value, ' '), '')
                  FROM attributes a
                  WHERE a.file_id = NEW.id)
             );
         END;

         CREATE TRIGGER file_tags_fts_ai
         AFTER INSERT ON file_tags
         BEGIN
             INSERT OR REPLACE INTO files_fts(rowid, path, tags_text, attrs_text)
             SELECT f.id, f.path,
                 (SELECT IFNULL(GROUP_CONCAT(tag_path, ' '), '')
                  FROM (
                      WITH RECURSIVE tag_tree(id, name, parent_id, path) AS (
                          SELECT t.id, t.name, t.parent_id, t.name
                          FROM tags t
                          WHERE t.parent_id IS NULL

                          UNION ALL

                          SELECT t.id, t.name, t.parent_id, tt.path || '/' || t.name
                          FROM tags t
                          JOIN tag_tree tt ON t.parent_id = tt.id
                      )
                      SELECT DISTINCT tag_tree.path AS tag_path
                      FROM file_tags ft
                      JOIN tag_tree ON ft.tag_id = tag_tree.id
                      WHERE ft.file_id = f.id

                      UNION

                      SELECT t.name AS tag_path
                      FROM file_tags ft
                      JOIN tags t ON ft.tag_id = t.id
                      WHERE ft.file_id = f.id AND t.parent_id IS NULL
                  )),
                 (SELECT IFNULL(GROUP_CONCAT(a.key || '=' || a.value, ' '), '')
                  FROM attributes a
                  WHERE a.file_id = f.id)
             FROM files f
             WHERE f.id = NEW.file_id;
         END;",
    )?;

    // Insert test data.
    println!("Inserting test data...");

    // Insert a test file.
    conn.execute(
        "INSERT INTO files (id, path) VALUES (1, '/test/document.md')",
        [],
    )?;

    // Create hierarchical tags: project/md.
    println!("Creating hierarchical tags: project/md");

    // Insert the parent tag 'project'.
    conn.execute(
        "INSERT INTO tags (id, name, parent_id) VALUES (1, 'project', NULL)",
        [],
    )?;

    // Insert the child tag 'md' under 'project'.
    conn.execute(
        "INSERT INTO tags (id, name, parent_id) VALUES (2, 'md', 1)",
        [],
    )?;

    // Tag the file with the 'md' tag (which lives under 'project').
    conn.execute(
        "INSERT INTO file_tags (file_id, tag_id) VALUES (1, 2)",
        [],
    )?;

    // Check what's in the FTS index.
    println!("\nChecking FTS index content:");
    let mut stmt = conn.prepare("SELECT rowid, path, tags_text, attrs_text FROM files_fts")?;
    let rows = stmt.query_map([], |row| {
        Ok((
            row.get::<_, i64>(0)?,
            row.get::<_, String>(1)?,
            row.get::<_, String>(2)?,
            row.get::<_, String>(3)?,
        ))
    })?;

    for row in rows {
        let (id, path, tags, attrs) = row?;
        println!("ID: {}, Path: {}, Tags: '{}', Attrs: '{}'", id, path, tags, attrs);
    }

    // Test searching for the full hierarchical tag path. The path is quoted
    // inside the MATCH expression so FTS5 treats 'project/md' as a phrase;
    // an unquoted '/' is a syntax error in an FTS5 query.
    println!("\nTesting search for 'project/md':");
    let mut stmt = conn.prepare(
        "SELECT f.path FROM files_fts JOIN files f ON f.id = files_fts.rowid \
         WHERE files_fts MATCH '\"project/md\"'",
    )?;
    let rows = stmt.query_map([], |row| row.get::<_, String>(0))?;

    let mut found = false;
    for row in rows {
        found = true;
        println!("Found file: {}", row?);
    }

    if !found {
        println!("No files found with tag 'project/md'");
    }

    // Test searching for just the parent tag.
    println!("\nTesting search for just 'project':");
    let mut stmt = conn.prepare(
        "SELECT f.path FROM files_fts JOIN files f ON f.id = files_fts.rowid \
         WHERE files_fts MATCH 'project'",
    )?;
    let rows = stmt.query_map([], |row| row.get::<_, String>(0))?;

    let mut found = false;
    for row in rows {
        found = true;
        println!("Found file: {}", row?);
    }

    if !found {
        println!("No files found with tag 'project'");
    }

    // Test searching for just the child tag.
    println!("\nTesting search for just 'md':");
    let mut stmt = conn.prepare(
        "SELECT f.path FROM files_fts JOIN files f ON f.id = files_fts.rowid \
         WHERE files_fts MATCH 'md'",
    )?;
    let rows = stmt.query_map([], |row| row.get::<_, String>(0))?;

    let mut found = false;
    for row in rows {
        found = true;
        println!("Found file: {}", row?);
    }

    if !found {
        println!("No files found with tag 'md'");
    }

    println!("\nTest completed successfully!");
    Ok(())
}
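For readers who want to poke at just the core technique in isolation — the recursive CTE the triggers above use to expand a leaf tag into its full `parent/child` path — here is a minimal, self-contained sketch. It mirrors the schema above but is illustrative only, not part of the Marlin codebase:

```rust
// Standalone illustration of the hierarchical-path CTE (hypothetical helper,
// not Marlin code). Root tags seed the recursion; each step appends
// '/' + the child's name.
use rusqlite::Connection;

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch(
        "CREATE TABLE tags (
             id        INTEGER PRIMARY KEY,
             name      TEXT NOT NULL,
             parent_id INTEGER REFERENCES tags(id)
         );
         INSERT INTO tags (id, name, parent_id) VALUES
             (1, 'project', NULL),
             (2, 'md', 1);",
    )?;

    let mut stmt = conn.prepare(
        "WITH RECURSIVE tag_tree(id, path) AS (
             SELECT id, name FROM tags WHERE parent_id IS NULL
             UNION ALL
             SELECT t.id, tt.path || '/' || t.name
             FROM tags t JOIN tag_tree tt ON t.parent_id = tt.id
         )
         SELECT path FROM tag_tree ORDER BY path",
    )?;
    for path in stmt.query_map([], |r| r.get::<_, String>(0))? {
        println!("{}", path?); // prints 'project' then 'project/md'
    }
    Ok(())
}
```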
Binary file not shown.
@@ -1 +1 @@
/home/user/Documents/GitHub/Marlin/target/release/marlin: /home/user/Documents/GitHub/Marlin/src/cli/annotate.rs /home/user/Documents/GitHub/Marlin/src/cli/coll.rs /home/user/Documents/GitHub/Marlin/src/cli/event.rs /home/user/Documents/GitHub/Marlin/src/cli/link.rs /home/user/Documents/GitHub/Marlin/src/cli/remind.rs /home/user/Documents/GitHub/Marlin/src/cli/state.rs /home/user/Documents/GitHub/Marlin/src/cli/task.rs /home/user/Documents/GitHub/Marlin/src/cli/version.rs /home/user/Documents/GitHub/Marlin/src/cli/view.rs /home/user/Documents/GitHub/Marlin/src/cli.rs /home/user/Documents/GitHub/Marlin/src/config.rs /home/user/Documents/GitHub/Marlin/src/db/migrations/0001_initial_schema.sql /home/user/Documents/GitHub/Marlin/src/db/migrations/0002_update_fts_and_triggers.sql /home/user/Documents/GitHub/Marlin/src/db/migrations/0003_create_links_collections_views.sql /home/user/Documents/GitHub/Marlin/src/db/migrations/0004_fix_hierarchical_tags_fts.sql /home/user/Documents/GitHub/Marlin/src/db/mod.rs /home/user/Documents/GitHub/Marlin/src/logging.rs /home/user/Documents/GitHub/Marlin/src/main.rs /home/user/Documents/GitHub/Marlin/src/scan.rs
/home/user/Documents/GitHub/Marlin/target/release/marlin: /home/user/Documents/GitHub/Marlin/cli-bin/build.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/cli/annotate.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/cli/coll.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/cli/event.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/cli/link.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/cli/remind.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/cli/state.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/cli/task.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/cli/version.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/cli/view.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/cli.rs /home/user/Documents/GitHub/Marlin/cli-bin/src/main.rs /home/user/Documents/GitHub/Marlin/libmarlin/src/config.rs /home/user/Documents/GitHub/Marlin/libmarlin/src/db/migrations/0001_initial_schema.sql /home/user/Documents/GitHub/Marlin/libmarlin/src/db/migrations/0002_update_fts_and_triggers.sql /home/user/Documents/GitHub/Marlin/libmarlin/src/db/migrations/0003_create_links_collections_views.sql /home/user/Documents/GitHub/Marlin/libmarlin/src/db/migrations/0004_fix_hierarchical_tags_fts.sql /home/user/Documents/GitHub/Marlin/libmarlin/src/db/mod.rs /home/user/Documents/GitHub/Marlin/libmarlin/src/lib.rs /home/user/Documents/GitHub/Marlin/libmarlin/src/logging.rs /home/user/Documents/GitHub/Marlin/libmarlin/src/scan.rs /home/user/Documents/GitHub/Marlin/libmarlin/src/utils.rs
34
tui-bin/Cargo.toml
Normal file
@@ -0,0 +1,34 @@
[package]
name = "marlin-tui"
version = "0.1.0"
edition = "2021"

# Build a binary called `marlin-tui` from src/main.rs
[[bin]]
name = "marlin-tui"
path = "src/main.rs"

[dependencies]
anyhow = "1"
clap = { version = "4", features = ["derive"] }
directories = "5"
glob = "0.3"
rusqlite = { version = "0.31", features = ["bundled", "backup"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["fmt", "env-filter"] }
walkdir = "2.5"
shlex = "1.3"
chrono = "0.4"
shellexpand = "3.1"
clap_complete = "4.1"
serde_json = { version = "1", optional = true }

[dev-dependencies]
assert_cmd = "2"
predicates = "3"
tempfile = "3"
dirs = "5"

[features]
# Enable JSON output when requested.
json = ["serde_json"]
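A note on the `json` feature above: a minimal, hypothetical sketch of how an optional `serde_json` dependency is typically consumed behind a `cfg` gate. The helper function is illustrative, not actual Marlin code:

```rust
// Hypothetical helper, compiled only with `cargo run --features json`.
#[cfg(feature = "json")]
fn print_results_json(paths: &[String]) -> anyhow::Result<()> {
    // serde_json is only linked when the `json` feature pulls it in.
    println!("{}", serde_json::to_string_pretty(paths)?);
    Ok(())
}

#[cfg(not(feature = "json"))]
fn print_results_json(_paths: &[String]) -> anyhow::Result<()> {
    anyhow::bail!("recompile with --features json for JSON output")
}

fn main() -> anyhow::Result<()> {
    print_results_json(&["/tmp/example.txt".to_string()])
}
```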
5
tui-bin/src/main.rs
Normal file
@@ -0,0 +1,5 @@
// tui-bin/src/main.rs

fn main() {
    eprintln!("marlin-tui is not yet implemented. Stay tuned!");
}