Merge pull request #5 from PR0M3TH3AN/beta

updates
This commit is contained in:
thePR0M3TH3AN
2025-05-18 21:31:58 -04:00
committed by GitHub
16 changed files with 523 additions and 69 deletions

View File

@@ -31,8 +31,7 @@ jobs:
needs: build-and-test
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- uses: actions/checkout@v3
- name: Install Rust (nightly)
uses: actions-rs/toolchain@v1
@@ -40,15 +39,22 @@ jobs:
toolchain: nightly
override: true
- name: Install tarpaulin prerequisites
- name: Install system prerequisites
run: |
rustup component add llvm-tools-preview
sudo apt-get update
sudo apt-get install -y pkg-config libssl-dev
- name: Add llvm-tools (for tarpaulin)
run: rustup component add llvm-tools-preview
- name: Install cargo-tarpaulin
run: cargo install cargo-tarpaulin
- name: Run coverage
run: cargo tarpaulin --workspace --out Xml --fail-under 85
- name: Code coverage (libmarlin only)
run: cargo tarpaulin \
--package libmarlin \
--out Xml \
--fail-under 85
benchmark:
name: Performance Benchmark (Hyperfine)

1
Cargo.lock generated
View File

@@ -452,6 +452,7 @@ dependencies = [
"serde_json",
"shellexpand",
"shlex",
"tempfile",
"tracing",
"tracing-subscriber",
"walkdir",

1
cobertura.xml Normal file

File diff suppressed because one or more lines are too long

View File

@@ -19,3 +19,10 @@ serde_json = { version = "1", optional = true }
[features]
json = ["serde_json"]
[dev-dependencies]
# for temporary directories in config_tests.rs and scan_tests.rs
tempfile = "3"
# you already have rusqlite in [dependencies], so scan_tests.rs
# can just use rusqlite::Connection, no need to repeat it here.

View File

@@ -0,0 +1,22 @@
// libmarlin/src/config_tests.rs
use super::config::Config;
use std::env;
use tempfile::tempdir;
#[test]
fn load_env_override() {
    /// Restores the environment even if an assertion below panics, so a
    /// failing run cannot leak MARLIN_DB_PATH into other tests in this
    /// process (the original version only cleaned up on success).
    struct Restore;
    impl Drop for Restore {
        fn drop(&mut self) {
            env::remove_var("MARLIN_DB_PATH");
        }
    }

    let tmp = tempdir().unwrap();
    let db = tmp.path().join("custom.db");
    env::set_var("MARLIN_DB_PATH", &db);
    let _restore = Restore;

    // Config::load must prefer the env override over XDG/CWD fallbacks.
    let cfg = Config::load().unwrap();
    assert_eq!(cfg.db_path, db);
    // NOTE(review): env vars are process-global and the default test runner
    // is parallel — consider serializing env-mutating tests if flaky.
}
#[test]
fn load_xdg_or_fallback() {
    // Smoke test: without asserting a specific location, Config::load must
    // resolve *some* database path (XDG data dir or CWD fallback) without
    // erroring, and that path must name a .db file.
    // NOTE(review): this runs in parallel with load_env_override, which
    // mutates MARLIN_DB_PATH — the two can race; verify if this test flakes.
    let cfg = Config::load().unwrap();
    assert!(cfg.db_path.to_string_lossy().ends_with(".db"));
}

View File

@@ -48,7 +48,7 @@ pub fn open<P: AsRef<Path>>(db_path: P) -> Result<Connection> {
/* ─── migration runner ────────────────────────────────────────────── */
fn apply_migrations(conn: &mut Connection) -> Result<()> {
pub(crate) fn apply_migrations(conn: &mut Connection) -> Result<()> {
// Ensure schema_version bookkeeping table exists
conn.execute_batch(
"CREATE TABLE IF NOT EXISTS schema_version (

175
libmarlin/src/db_tests.rs Normal file
View File

@@ -0,0 +1,175 @@
// libmarlin/src/db_tests.rs
use super::db;
use rusqlite::Connection;
use tempfile::tempdir;
/// Test helper: open a fresh in-memory database. `db::open` also applies
/// every migration, so callers get the full current schema.
fn open_mem() -> Connection {
    db::open(":memory:").expect("open in-memory DB")
}
#[test]
fn ensure_tag_path_creates_hierarchy() {
    let conn = open_mem();

    // Materialise the three-level path foo/bar/baz; the returned id is the leaf.
    let leaf_id = db::ensure_tag_path(&conn, "foo/bar/baz").unwrap();

    // "foo" must exist as a root tag (no parent)…
    let foo_id: i64 = conn
        .query_row(
            "SELECT id FROM tags WHERE name='foo' AND parent_id IS NULL",
            [],
            |row| row.get(0),
        )
        .unwrap();

    // …"bar" must hang off "foo"…
    let bar_id: i64 = conn
        .query_row(
            "SELECT id FROM tags WHERE name='bar' AND parent_id = ?1",
            [foo_id],
            |row| row.get(0),
        )
        .unwrap();

    // …and "baz" off "bar". Its id is exactly what ensure_tag_path returned.
    let baz_id: i64 = conn
        .query_row(
            "SELECT id FROM tags WHERE name='baz' AND parent_id = ?1",
            [bar_id],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(baz_id, leaf_id);
}
#[test]
fn upsert_attr_inserts_and_updates() {
    let conn = open_mem();

    // Seed a single file row to hang attributes off.
    conn.execute(
        "INSERT INTO files(path, size, mtime) VALUES (?1, 0, 0)",
        ["a.txt"],
    )
    .unwrap();
    let file_id: i64 = conn
        .query_row("SELECT id FROM files WHERE path='a.txt'", [], |row| row.get(0))
        .unwrap();

    // Reads back the current value of attribute "k" for the given file.
    let fetch = |fid: i64| -> String {
        conn.query_row(
            "SELECT value FROM attributes WHERE file_id=?1 AND key='k'",
            [fid],
            |row| row.get(0),
        )
        .unwrap()
    };

    // First upsert inserts…
    db::upsert_attr(&conn, file_id, "k", "v").unwrap();
    assert_eq!(fetch(file_id), "v");

    // …second upsert overwrites the same key in place.
    db::upsert_attr(&conn, file_id, "k", "v2").unwrap();
    assert_eq!(fetch(file_id), "v2");
}
#[test]
fn add_and_remove_links_and_backlinks() {
    let conn = open_mem();

    // Two files to link together.
    for path in ["one.txt", "two.txt"] {
        conn.execute(
            "INSERT INTO files(path, size, mtime) VALUES (?1, 0, 0)",
            [path],
        )
        .unwrap();
    }
    let src_id: i64 = conn
        .query_row("SELECT id FROM files WHERE path='one.txt'", [], |row| row.get(0))
        .unwrap();
    let dst_id: i64 = conn
        .query_row("SELECT id FROM files WHERE path='two.txt'", [], |row| row.get(0))
        .unwrap();

    // A typed link shows up when listing from the source side…
    db::add_link(&conn, src_id, dst_id, Some("ref")).unwrap();
    let links = db::list_links(&conn, "one%", None, None).unwrap();
    assert_eq!(links.len(), 1);
    assert_eq!(links[0].2.as_deref(), Some("ref"));

    // …and is mirrored by the backlink query on the destination side.
    let backlinks = db::find_backlinks(&conn, "two%").unwrap();
    assert_eq!(backlinks.len(), 1);
    assert_eq!(backlinks[0].1.as_deref(), Some("ref"));

    // Removing the link empties the listing again.
    db::remove_link(&conn, src_id, dst_id, Some("ref")).unwrap();
    assert!(db::list_links(&conn, "one%", None, None).unwrap().is_empty());
}
#[test]
fn collections_roundtrip() {
    let conn = open_mem();

    // A collection "C" plus one indexed file…
    let coll_id = db::ensure_collection(&conn, "C").unwrap();
    conn.execute(
        "INSERT INTO files(path, size, mtime) VALUES (?1, 0, 0)",
        ["f.txt"],
    )
    .unwrap();
    let file_id: i64 = conn
        .query_row("SELECT id FROM files WHERE path='f.txt'", [], |row| row.get(0))
        .unwrap();

    // …added to the collection must round-trip through list_collection.
    db::add_file_to_collection(&conn, coll_id, file_id).unwrap();
    let members = db::list_collection(&conn, "C").unwrap();
    assert_eq!(members, vec!["f.txt".to_string()]);
}
#[test]
fn views_save_and_query() {
    let conn = open_mem();

    // Saving a named view makes it both listable and individually queryable.
    db::save_view(&conn, "v1", "some_query").unwrap();

    let expected = ("v1".to_string(), "some_query".to_string());
    assert_eq!(db::list_views(&conn).unwrap(), vec![expected]);
    assert_eq!(db::view_query(&conn, "v1").unwrap(), "some_query");
}
#[test]
fn backup_and_restore_cycle() {
    let tmp = tempdir().unwrap();
    let db_path = tmp.path().join("data.db");

    // Seed a live on-disk database with a single row.
    let live = db::open(&db_path).unwrap();
    live.execute(
        "INSERT INTO files(path, size, mtime) VALUES (?1, 0, 0)",
        ["x.bin"],
    )
    .unwrap();

    // Snapshot it, delete the original, then restore from the snapshot.
    let snapshot = db::backup(&db_path).unwrap();
    std::fs::remove_file(&db_path).unwrap();
    db::restore(&snapshot, &db_path).unwrap();

    // The restored database must still contain the seeded row.
    let restored = db::open(&db_path).unwrap();
    let count: i64 = restored
        .query_row("SELECT COUNT(*) FROM files WHERE path='x.bin'", [], |row| row.get(0))
        .unwrap();
    assert_eq!(count, 1);
}

View File

@@ -0,0 +1,74 @@
// libmarlin/src/facade_tests.rs
use super::*; // brings Marlin, config, etc.
use std::{env, fs};
use tempfile::tempdir;
#[test]
fn open_at_and_scan_and_search() {
    // End-to-end smoke test: open_at → scan → FTS search → substring fallback.

    // 1) Prepare a temp workspace with one file
    let tmp = tempdir().unwrap();
    let file = tmp.path().join("hello.txt");
    fs::write(&file, "hello FAÇT").unwrap();

    // 2) Use open_at to create a fresh DB
    let db_path = tmp.path().join("explicit.db");
    let mut m = Marlin::open_at(&db_path).expect("open_at should succeed");
    assert!(db_path.exists(), "DB file should be created");

    // 3) Scan the directory. The count stays 1 even though explicit.db lives
    //    in the same tree, because scan_directory skips *.db / -wal / -shm files.
    let count = m.scan(&[tmp.path()]).expect("scan should succeed");
    assert_eq!(count, 1, "we created exactly one file");

    // 4) Search using an FTS hit
    let hits = m.search("hello").expect("search must not error");
    assert_eq!(hits.len(), 1);
    assert!(hits[0].ends_with("hello.txt"));

    // 5) "FAÇT" is not a clean FTS token, so this exercises the substring
    //    fallback path in Marlin::search (fires when FTS returns no rows).
    let fallback_hits = m.search("FAÇT").expect("fallback search works");
    assert_eq!(fallback_hits.len(), 1);
    assert!(fallback_hits[0].ends_with("hello.txt"));
}
#[test]
fn tag_and_search_by_tag() {
    /// Restores the env override even if an unwrap below panics, so a
    /// failing run cannot leak MARLIN_DB_PATH into other tests (the original
    /// only removed it on the success path).
    struct Restore;
    impl Drop for Restore {
        fn drop(&mut self) {
            env::remove_var("MARLIN_DB_PATH");
        }
    }

    let tmp = tempdir().unwrap();
    let a = tmp.path().join("a.md");
    let b = tmp.path().join("b.md");
    fs::write(&a, "# a").unwrap();
    fs::write(&b, "# b").unwrap();

    // Route open_default() at a DB inside the temp dir via the env override.
    let db_path = tmp.path().join("my.db");
    env::set_var("MARLIN_DB_PATH", &db_path);
    let _restore = Restore;

    let mut m = Marlin::open_default().unwrap();
    m.scan(&[tmp.path()]).unwrap();

    // The glob matches both indexed .md files, so both get tagged.
    let changed = m.tag("*.md", "foo/bar").unwrap();
    assert_eq!(changed, 2);

    // The tag is findable through the FTS tags_text column.
    let tagged = m.search("tags_text:\"foo/bar\"").unwrap();
    assert_eq!(tagged.len(), 2);
}
#[test]
fn open_default_fallback_config() {
    // Remember the real environment so it can be put back afterwards — the
    // original version removed HOME entirely at the end, clobbering it for
    // every test that runs later in this process, and never restored
    // XDG_DATA_HOME at all.
    let saved_home = env::var_os("HOME");
    let saved_xdg = env::var_os("XDG_DATA_HOME");

    // Unset all overrides
    env::remove_var("MARLIN_DB_PATH");
    env::remove_var("XDG_DATA_HOME");

    // Point HOME at a throwaway dir so no real XDG data dir is picked up.
    let fake_home = tempdir().unwrap();
    env::set_var("HOME", fake_home.path());

    // This should fall back to "./index_<hash>.db"
    let cfg = config::Config::load().unwrap();
    let fname = cfg.db_path.file_name().unwrap().to_string_lossy();
    assert!(fname.starts_with("index_") && fname.ends_with(".db"));

    // Restore the pre-test environment.
    match saved_home {
        Some(v) => env::set_var("HOME", v),
        None => env::remove_var("HOME"),
    }
    match saved_xdg {
        Some(v) => env::set_var("XDG_DATA_HOME", v),
        None => env::remove_var("XDG_DATA_HOME"),
    }
}

View File

@@ -7,21 +7,30 @@
#![deny(warnings)]
pub mod config; // moved as-is
pub mod db; // moved as-is
pub mod config; // as-is
pub mod db; // as-is
pub mod logging; // expose the logging init helper
pub mod scan; // moved as-is
pub mod scan; // as-is
pub mod utils; // hosts determine_scan_root() & misc helpers
#[cfg(test)]
mod utils_tests;
#[cfg(test)]
mod config_tests;
#[cfg(test)]
mod scan_tests;
#[cfg(test)]
mod logging_tests;
#[cfg(test)]
mod db_tests;
#[cfg(test)]
mod facade_tests;
use anyhow::{Context, Result};
use rusqlite::Connection;
use std::path::Path;
use walkdir::WalkDir;
use std::{fs, path::Path};
/// Primary façade — open a workspace, then call helper methods.
///
/// Most methods simply wrap what the CLI used to do directly; more will be
/// filled in sprint-by-sprint.
/// Main handle for interacting with a Marlin database.
pub struct Marlin {
#[allow(dead_code)]
cfg: config::Config,
@@ -29,94 +38,165 @@ pub struct Marlin {
}
impl Marlin {
/// Load configuration from env / workspace and open (or create) the DB.
/// Open using the default config (env override or XDG/CWD fallback),
/// ensuring parent directories exist and applying migrations.
pub fn open_default() -> Result<Self> {
let cfg = config::Config::load()?;
let conn = db::open(&cfg.db_path)?;
Ok(Self { cfg, conn })
// 1) Load configuration (checks MARLIN_DB_PATH, XDG_DATA_HOME, or falls back to ./index_<hash>.db)
let cfg = config::Config::load()?;
// 2) Ensure the DB's parent directory exists
if let Some(parent) = cfg.db_path.parent() {
fs::create_dir_all(parent)?;
}
// 3) Open the database and run migrations
let conn = db::open(&cfg.db_path)
.context(format!("opening database at {}", cfg.db_path.display()))?;
Ok(Marlin { cfg, conn })
}
/// Open an explicit DB path handy for tests or headless tools.
pub fn open_at<P: AsRef<Path>>(path: P) -> Result<Self> {
let cfg = config::Config { db_path: path.as_ref().to_path_buf() };
let conn = db::open(&cfg.db_path)?;
Ok(Self { cfg, conn })
/// Open a Marlin instance at the specified database path,
/// creating parent directories and applying migrations.
pub fn open_at<P: AsRef<Path>>(db_path: P) -> Result<Self> {
let db_path = db_path.as_ref();
// Ensure the specified DB directory exists
if let Some(parent) = db_path.parent() {
fs::create_dir_all(parent)?;
}
// Build a minimal Config so callers can still inspect cfg.db_path
let cfg = config::Config { db_path: db_path.to_path_buf() };
// Open the database and run migrations
let conn = db::open(db_path)
.context(format!("opening database at {}", db_path.display()))?;
Ok(Marlin { cfg, conn })
}
/// Recursively index one or more directories.
pub fn scan<P: AsRef<Path>>(&mut self, paths: &[P]) -> Result<usize> {
let mut total = 0usize;
let mut total = 0;
for p in paths {
total += scan::scan_directory(&mut self.conn, p.as_ref())?;
}
Ok(total)
}
/// Attach a hierarchical tag (`foo/bar`) to every file that matches the
/// glob pattern. Returns the number of files that actually got updated.
/// Attach a hierarchical tag (`foo/bar`) to every _indexed_ file
/// matching the glob. Returns the number of files actually updated.
pub fn tag(&mut self, pattern: &str, tag_path: &str) -> Result<usize> {
use glob::Pattern;
// 1) ensure tag hierarchy exists
let leaf_tag_id = db::ensure_tag_path(&self.conn, tag_path)?;
// 1) ensure tag hierarchy
let leaf = db::ensure_tag_path(&self.conn, tag_path)?;
// 2) collect leaf + ancestors
// 2) collect it plus all ancestors
let mut tag_ids = Vec::new();
let mut current = Some(leaf_tag_id);
while let Some(id) = current {
let mut cur = Some(leaf);
while let Some(id) = cur {
tag_ids.push(id);
current = self.conn.query_row(
"SELECT parent_id FROM tags WHERE id=?1",
cur = self.conn.query_row(
"SELECT parent_id FROM tags WHERE id = ?1",
[id],
|r| r.get::<_, Option<i64>>(0),
)?;
}
// 3) walk the file tree and upsert `file_tags`
// 3) pick matching files _from the DB_ (not from the FS!)
let expanded = shellexpand::tilde(pattern).into_owned();
let pat = Pattern::new(&expanded)
.with_context(|| format!("Invalid glob pattern `{expanded}`"))?;
let root = utils::determine_scan_root(&expanded);
let pat = Pattern::new(&expanded)
.with_context(|| format!("Invalid glob pattern `{}`", expanded))?;
// pull down all (id, path)
let mut stmt_all = self.conn.prepare("SELECT id, path FROM files")?;
let rows = stmt_all.query_map([], |r| Ok((r.get(0)?, r.get(1)?)))?;
let mut stmt_file = self.conn.prepare("SELECT id FROM files WHERE path=?1")?;
let mut stmt_insert = self.conn.prepare(
"INSERT OR IGNORE INTO file_tags(file_id, tag_id) VALUES (?1, ?2)",
)?;
let mut changed = 0usize;
for entry in WalkDir::new(&root)
.into_iter()
.filter_map(Result::ok)
.filter(|e| e.file_type().is_file())
{
let p = entry.path().to_string_lossy();
if !pat.matches(&p) { continue; }
let mut changed = 0;
for row in rows {
let (fid, path_str): (i64, String) = row?;
let matches = if expanded.contains(std::path::MAIN_SEPARATOR) {
// pattern includes a slash — match full path
pat.matches(&path_str)
} else {
// no slash — match just the file name
std::path::Path::new(&path_str)
.file_name()
.and_then(|n| n.to_str())
.map(|n| pat.matches(n))
.unwrap_or(false)
};
if !matches {
continue;
}
match stmt_file.query_row([p.as_ref()], |r| r.get::<_, i64>(0)) {
Ok(fid) => {
let mut newly = false;
for &tid in &tag_ids {
if stmt_insert.execute([fid, tid])? > 0 { newly = true; }
}
if newly { changed += 1; }
// upsert this tag + its ancestors
let mut newly = false;
for &tid in &tag_ids {
if stmt_insert.execute([fid, tid])? > 0 {
newly = true;
}
Err(_) => { /* ignore nonindexed files */ }
}
if newly {
changed += 1;
}
}
Ok(changed)
}
/// FTS5 search → list of matching paths.
/// Fulltext search over path, tags, and attrs (with fallback).
pub fn search(&self, query: &str) -> Result<Vec<String>> {
let mut stmt = self.conn.prepare(
"SELECT path FROM files_fts WHERE files_fts MATCH ?1 ORDER BY rank",
r#"
SELECT f.path
FROM files_fts
JOIN files f ON f.rowid = files_fts.rowid
WHERE files_fts MATCH ?1
ORDER BY rank
"#,
)?;
let rows = stmt.query_map([query], |r| r.get::<_, String>(0))?
.collect::<Result<Vec<_>, _>>()?;
Ok(rows)
let mut hits = stmt
.query_map([query], |r| r.get(0))?
.collect::<Result<Vec<_>, _>>()?;
// graceful fallback: substring scan when no FTS hits and no `:` in query
if hits.is_empty() && !query.contains(':') {
hits = self.fallback_search(query)?;
}
Ok(hits)
}
/// private helper: scan `files` table + small files for a substring
fn fallback_search(&self, term: &str) -> Result<Vec<String>> {
let needle = term.to_lowercase();
let mut stmt = self.conn.prepare("SELECT path FROM files")?;
let rows = stmt.query_map([], |r| r.get(0))?;
let mut out = Vec::new();
for path_res in rows {
let p: String = path_res?; // Explicit type annotation added
// match in the path itself?
if p.to_lowercase().contains(&needle) {
out.push(p.clone());
continue;
}
// otherwise read small files
if let Ok(meta) = fs::metadata(&p) {
if meta.len() <= 65_536 {
if let Ok(body) = fs::read_to_string(&p) {
if body.to_lowercase().contains(&needle) {
out.push(p.clone());
}
}
}
}
}
Ok(out)
}
/// Borrow the underlying SQLite connection (read-only).
pub fn conn(&self) -> &Connection { &self.conn }
}
pub fn conn(&self) -> &Connection {
&self.conn
}
}

View File

@@ -0,0 +1,13 @@
// libmarlin/src/logging_tests.rs
use super::logging;
use tracing::Level;
#[test]
fn init_sets_up_subscriber() {
    // Provide RUST_LOG so logging::init takes its EnvFilter branch.
    // NOTE(review): RUST_LOG is left set afterwards and env mutation is
    // process-global — confirm no sibling test depends on it being unset.
    std::env::set_var("RUST_LOG", "debug");
    logging::init();
    // Emit one event through the freshly-installed subscriber; reaching the
    // end of the test without a panic means we're good.
    tracing::event!(Level::INFO, "this is a test log");
}

View File

@@ -1,4 +1,5 @@
// src/scan.rs (unchanged except tiny doc tweak)
// src/scan.rs
use std::fs;
use std::path::Path;
@@ -27,14 +28,22 @@ pub fn scan_directory(conn: &mut Connection, root: &Path) -> Result<usize> {
.filter_map(Result::ok)
.filter(|e| e.file_type().is_file())
{
let meta = fs::metadata(entry.path())?;
let path = entry.path();
// Skip the database file and its WAL/SHM siblings
if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
if name.ends_with(".db") || name.ends_with("-wal") || name.ends_with("-shm") {
continue;
}
}
let meta = fs::metadata(path)?;
let size = meta.len() as i64;
let mtime = meta
.modified()?
.duration_since(std::time::UNIX_EPOCH)?
.as_secs() as i64;
let path_str = entry.path().to_string_lossy();
let path_str = path.to_string_lossy();
stmt.execute(params![path_str, size, mtime])?;
count += 1;
debug!(file = %path_str, "indexed");

View File

@@ -0,0 +1,26 @@
// libmarlin/src/scan_tests.rs
use super::scan::scan_directory;
use super::db;
use tempfile::tempdir;
use std::fs::File;
#[test]
fn scan_directory_counts_files() {
    // Two plain files in a fresh temp dir…
    let tmp = tempdir().unwrap();
    for name in ["a.txt", "b.log"] {
        File::create(tmp.path().join(name)).unwrap();
    }

    // …indexed into an in-memory DB (db::open applies the migrations).
    let mut conn = db::open(":memory:").unwrap();
    let indexed = scan_directory(&mut conn, tmp.path()).unwrap();
    assert_eq!(indexed, 2);

    // The returned count must agree with what actually landed in `files`.
    let mut stmt = conn.prepare("SELECT COUNT(*) FROM files").unwrap();
    let rows: i64 = stmt.query_row([], |r| r.get(0)).unwrap();
    assert_eq!(rows, 2);
}

View File

@@ -3,16 +3,34 @@
use std::path::PathBuf;
/// Determine a filesystem root to limit recursive walking on glob scans.
///
/// If the pattern contains any of `*?[`, we take everything up to the
/// first such character, and then (if that still contains metacharacters)
/// walk up until there aren't any left. If there are *no* metachars at
/// all, we treat the entire string as a path and return its parent
/// directory (or `.` if it has no parent).
pub fn determine_scan_root(pattern: &str) -> PathBuf {
// find first wildcard char
let first_wild = pattern
.find(|c| matches!(c, '*' | '?' | '['))
.unwrap_or(pattern.len());
let mut root = PathBuf::from(&pattern[..first_wild]);
// everything up to the wildcard (or the whole string if none)
let prefix = &pattern[..first_wild];
let mut root = PathBuf::from(prefix);
// If there were NO wildcards at all, just return the parent directory
if first_wild == pattern.len() {
return root.parent().map(|p| p.to_path_buf()).unwrap_or_else(|| PathBuf::from("."));
}
// Otherwise, if the prefix still has any wildcards (e.g. "foo*/bar"),
// walk back up until it doesn't
while root
.as_os_str()
.to_string_lossy()
.contains(|c| matches!(c, '*' | '?' | '['))
.chars()
.any(|c| matches!(c, '*' | '?' | '['))
{
root = root.parent().map(|p| p.to_path_buf()).unwrap_or_default();
}

View File

@@ -0,0 +1,22 @@
// libmarlin/src/utils_tests.rs
use super::utils::determine_scan_root;
use std::path::PathBuf;
#[test]
fn determine_scan_root_plain_path() {
    // No glob metacharacters → the root is the parent directory of the path.
    let got = determine_scan_root("foo/bar/baz.txt");
    assert_eq!(got, PathBuf::from("foo/bar"));
}
#[test]
fn determine_scan_root_glob() {
    // Walking stops at the last component before the first wildcard.
    let got = determine_scan_root("foo/*/baz.rs");
    assert_eq!(got, PathBuf::from("foo"));
}
#[test]
fn determine_scan_root_only_wildcards() {
    // A pattern that is wildcards all the way down falls back to ".".
    let got = determine_scan_root("**/*.txt");
    assert_eq!(got, PathBuf::from("."));
}

View File

@@ -1 +1 @@
{"rustc_fingerprint":10768506583288887294,"outputs":{"7971740275564407648":{"success":true,"status":"","code":0,"stdout":"___\nlib___.rlib\nlib___.so\nlib___.so\nlib___.a\nlib___.so\n/home/user/.rustup/toolchains/stable-x86_64-unknown-linux-gnu\noff\npacked\nunpacked\n___\ndebug_assertions\npanic=\"unwind\"\nproc_macro\ntarget_abi=\"\"\ntarget_arch=\"x86_64\"\ntarget_endian=\"little\"\ntarget_env=\"gnu\"\ntarget_family=\"unix\"\ntarget_feature=\"fxsr\"\ntarget_feature=\"sse\"\ntarget_feature=\"sse2\"\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_os=\"linux\"\ntarget_pointer_width=\"64\"\ntarget_vendor=\"unknown\"\nunix\n","stderr":""},"17747080675513052775":{"success":true,"status":"","code":0,"stdout":"rustc 1.86.0 (05f9846f8 2025-03-31)\nbinary: rustc\ncommit-hash: 05f9846f893b09a1be1fc8560e33fc3c815cfecb\ncommit-date: 2025-03-31\nhost: x86_64-unknown-linux-gnu\nrelease: 1.86.0\nLLVM version: 19.1.7\n","stderr":""}},"successes":{}}
{"rustc_fingerprint":17558195974417946175,"outputs":{"7971740275564407648":{"success":true,"status":"","code":0,"stdout":"___\nlib___.rlib\nlib___.so\nlib___.so\nlib___.a\nlib___.so\n/home/user/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu\noff\npacked\nunpacked\n___\ndebug_assertions\nfmt_debug=\"full\"\noverflow_checks\npanic=\"unwind\"\nproc_macro\nrelocation_model=\"pic\"\ntarget_abi=\"\"\ntarget_arch=\"x86_64\"\ntarget_endian=\"little\"\ntarget_env=\"gnu\"\ntarget_family=\"unix\"\ntarget_feature=\"fxsr\"\ntarget_feature=\"sse\"\ntarget_feature=\"sse2\"\ntarget_feature=\"x87\"\ntarget_has_atomic\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_has_atomic_equal_alignment=\"16\"\ntarget_has_atomic_equal_alignment=\"32\"\ntarget_has_atomic_equal_alignment=\"64\"\ntarget_has_atomic_equal_alignment=\"8\"\ntarget_has_atomic_equal_alignment=\"ptr\"\ntarget_has_atomic_load_store\ntarget_has_atomic_load_store=\"16\"\ntarget_has_atomic_load_store=\"32\"\ntarget_has_atomic_load_store=\"64\"\ntarget_has_atomic_load_store=\"8\"\ntarget_has_atomic_load_store=\"ptr\"\ntarget_has_reliable_f128\ntarget_has_reliable_f16\ntarget_has_reliable_f16_math\ntarget_os=\"linux\"\ntarget_pointer_width=\"64\"\ntarget_thread_local\ntarget_vendor=\"unknown\"\nub_checks\nunix\n","stderr":""},"10431901537437931773":{"success":true,"status":"","code":0,"stdout":"___\nlib___.rlib\nlib___.so\nlib___.so\nlib___.a\nlib___.so\n/home/user/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu\noff\npacked\nunpacked\n___\ndebug_assertions\nfmt_debug=\"full\"\noverflow_checks\npanic=\"unwind\"\nproc_macro\nrelocation_model=\"pic\"\ntarget_abi=\"\"\ntarget_arch=\"x86_64\"\ntarget_endian=\"little\"\ntarget_env=\"gnu\"\ntarget_family=\"unix\"\ntarget_feature=\"fxsr\"\ntarget_feature=\"sse\"\ntarget_feature=\"sse2\"\ntarget_feature=\"x87\"\ntarget_has_atomic\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget
_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_has_atomic_equal_alignment=\"16\"\ntarget_has_atomic_equal_alignment=\"32\"\ntarget_has_atomic_equal_alignment=\"64\"\ntarget_has_atomic_equal_alignment=\"8\"\ntarget_has_atomic_equal_alignment=\"ptr\"\ntarget_has_atomic_load_store\ntarget_has_atomic_load_store=\"16\"\ntarget_has_atomic_load_store=\"32\"\ntarget_has_atomic_load_store=\"64\"\ntarget_has_atomic_load_store=\"8\"\ntarget_has_atomic_load_store=\"ptr\"\ntarget_has_reliable_f128\ntarget_has_reliable_f16\ntarget_has_reliable_f16_math\ntarget_os=\"linux\"\ntarget_pointer_width=\"64\"\ntarget_thread_local\ntarget_vendor=\"unknown\"\ntarpaulin\nub_checks\nunix\n","stderr":""},"17747080675513052775":{"success":true,"status":"","code":0,"stdout":"rustc 1.89.0-nightly (777d37277 2025-05-17)\nbinary: rustc\ncommit-hash: 777d372772aa3b39ba7273fcb8208a89f2ab0afd\ncommit-date: 2025-05-17\nhost: x86_64-unknown-linux-gnu\nrelease: 1.89.0-nightly\nLLVM version: 20.1.4\n","stderr":""}},"successes":{}}

Binary file not shown.