fix: address code quality issues, validation gaps, and add test coverage

Phase 1 - Code Quality:
- Rename script/script.rs to script/types.rs (module inception fix)
- Apply Clippy lint fixes (is_none_or, is_some_and, char patterns, etc.)
- Implement FromStr for CatalogFile and Category
- Add filtered_targets() and filtered_repos() helpers to Config

Phase 2 - Validation & Error Handling:
- Add validate_repo_identifier() for GitHub shorthand validation
- Fix first-run setup to fail gracefully if no targets configured
- Improve import to collect failures and only save on success
- Add AssetInstallResult for detailed install failure tracking
- Fix lockfile timestamp documentation (Unix epoch, not RFC 3339)
- Add comprehensive RevType heuristics documentation
- Add checkout warning when local modifications will be discarded

Phase 3 - Test Coverage:
- Add tempfile, assert_cmd, predicates dev dependencies
- Add security tests (symlink boundaries, copy mode)
- Add git operations tests (init, head_commit, RevType parsing)
- Add lockfile tests (roundtrip, lock/get operations)
- Add CLI integration tests (help, validation, duplicates)
- Add config validation tests for new helper methods

All 48 tests pass, clippy clean, release build verified.
This commit is contained in:
2026-01-26 10:19:10 +01:00
parent 6f714e58fa
commit 6be61df8a0
20 changed files with 1290 additions and 122 deletions

190
Cargo.lock generated
View File

@@ -2,6 +2,15 @@
# It is not intended for manual editing.
version = 4
[[package]]
name = "aho-corasick"
version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
dependencies = [
"memchr",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -58,12 +67,44 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "assert_cmd"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c5bcfa8749ac45dd12cb11055aeeb6b27a3895560d60d71e3c23bf979e60514"
dependencies = [
"anstyle",
"bstr",
"libc",
"predicates",
"predicates-core",
"predicates-tree",
"wait-timeout",
]
[[package]]
name = "autocfg"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]]
name = "bitflags"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
[[package]]
name = "bstr"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab"
dependencies = [
"memchr",
"regex-automata",
"serde",
]
[[package]]
name = "bumpalo"
version = "3.19.0"
@@ -196,7 +237,7 @@ dependencies = [
"crossterm_winapi",
"mio",
"parking_lot",
"rustix",
"rustix 0.38.44",
"signal-hook",
"signal-hook-mio",
"winapi",
@@ -246,6 +287,12 @@ dependencies = [
"syn",
]
[[package]]
name = "difflib"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8"
[[package]]
name = "dirs"
version = "5.0.1"
@@ -288,14 +335,17 @@ checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
name = "empeve"
version = "0.1.0"
dependencies = [
"assert_cmd",
"clap",
"colored",
"crossterm",
"dirs",
"git2",
"indicatif",
"predicates",
"ratatui",
"serde",
"tempfile",
"thiserror",
"toml",
"walkdir",
@@ -323,12 +373,27 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "fastrand"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "find-msvc-tools"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844"
[[package]]
name = "float-cmp"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8"
dependencies = [
"num-traits",
]
[[package]]
name = "fnv"
version = "1.0.7"
@@ -673,6 +738,12 @@ version = "0.4.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab"
[[package]]
name = "linux-raw-sys"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
[[package]]
name = "litemap"
version = "0.8.1"
@@ -721,6 +792,21 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "normalize-line-endings"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
[[package]]
name = "num-traits"
version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
dependencies = [
"autocfg",
]
[[package]]
name = "number_prefix"
version = "0.4.0"
@@ -819,6 +905,36 @@ dependencies = [
"zerovec",
]
[[package]]
name = "predicates"
version = "3.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5d19ee57562043d37e82899fade9a22ebab7be9cef5026b07fda9cdd4293573"
dependencies = [
"anstyle",
"difflib",
"float-cmp",
"normalize-line-endings",
"predicates-core",
"regex",
]
[[package]]
name = "predicates-core"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "727e462b119fe9c93fd0eb1429a5f7647394014cf3c04ab2c0350eeb09095ffa"
[[package]]
name = "predicates-tree"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72dd2d6d381dfb73a193c7fca536518d7caee39fc8503f74e7dc0be0531b425c"
dependencies = [
"predicates-core",
"termtree",
]
[[package]]
name = "proc-macro2"
version = "1.0.103"
@@ -884,6 +1000,35 @@ dependencies = [
"thiserror",
]
[[package]]
name = "regex"
version = "1.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
[[package]]
name = "rustix"
version = "0.38.44"
@@ -893,10 +1038,23 @@ dependencies = [
"bitflags",
"errno",
"libc",
"linux-raw-sys",
"linux-raw-sys 0.4.15",
"windows-sys 0.59.0",
]
[[package]]
name = "rustix"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34"
dependencies = [
"bitflags",
"errno",
"libc",
"linux-raw-sys 0.11.0",
"windows-sys 0.61.2",
]
[[package]]
name = "rustversion"
version = "1.0.22"
@@ -1067,6 +1225,25 @@ dependencies = [
"syn",
]
[[package]]
name = "tempfile"
version = "3.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c"
dependencies = [
"fastrand",
"getrandom 0.3.4",
"once_cell",
"rustix 1.1.3",
"windows-sys 0.61.2",
]
[[package]]
name = "termtree"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683"
[[package]]
name = "thiserror"
version = "1.0.69"
@@ -1203,6 +1380,15 @@ version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "wait-timeout"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11"
dependencies = [
"libc",
]
[[package]]
name = "walkdir"
version = "2.5.0"

View File

@@ -20,6 +20,11 @@ walkdir = "2"
colored = "2"
indicatif = "0.17"
[dev-dependencies]
tempfile = "3"
assert_cmd = "2"
predicates = "3"
[features]
default = []
tui = ["ratatui", "crossterm"]

View File

@@ -4,6 +4,7 @@
use serde::{Deserialize, Serialize};
use std::path::Path;
use std::str::FromStr;
use crate::error::Result;
@@ -37,14 +38,16 @@ impl CatalogFile {
/// Load a catalog from a TOML file
pub fn load(path: &Path) -> Result<Self> {
let content = std::fs::read_to_string(path)?;
let catalog: CatalogFile = toml::from_str(&content)?;
let catalog: CatalogFile = content.parse()?;
Ok(catalog)
}
}
/// Parse a catalog from a TOML string
pub fn from_str(content: &str) -> Result<Self> {
let catalog: CatalogFile = toml::from_str(content)?;
Ok(catalog)
impl FromStr for CatalogFile {
type Err = toml::de::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
toml::from_str(s)
}
}
@@ -91,16 +94,31 @@ impl Category {
Category::Utility,
]
}
}
/// Match a category string to a Category enum (case-insensitive)
pub fn from_str(s: &str) -> Option<Self> {
/// Error returned when parsing an invalid category string
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParseCategoryError(String);
impl std::fmt::Display for ParseCategoryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "unknown category: '{}'", self.0)
}
}
impl std::error::Error for ParseCategoryError {}
impl FromStr for Category {
type Err = ParseCategoryError;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"ui" | "ui/osc" => Some(Category::Ui),
"playback" => Some(Category::Playback),
"subtitles" => Some(Category::Subtitles),
"media" => Some(Category::Media),
"utility" => Some(Category::Utility),
_ => None,
"ui" | "ui/osc" => Ok(Category::Ui),
"playback" => Ok(Category::Playback),
"subtitles" => Ok(Category::Subtitles),
"media" => Ok(Category::Media),
"utility" => Ok(Category::Utility),
_ => Err(ParseCategoryError(s.to_string())),
}
}
}
@@ -116,7 +134,7 @@ impl CatalogManager {
let mut entries = Vec::new();
// 1. Load bundled catalog
if let Ok(bundled) = CatalogFile::from_str(BUNDLED_CATALOG) {
if let Ok(bundled) = BUNDLED_CATALOG.parse::<CatalogFile>() {
entries.extend(bundled.entries);
}
@@ -146,7 +164,8 @@ impl CatalogManager {
/// Load only the bundled catalog (for backwards compatibility)
pub fn bundled() -> Self {
let entries = CatalogFile::from_str(BUNDLED_CATALOG)
let entries = BUNDLED_CATALOG
.parse::<CatalogFile>()
.map(|c| c.entries)
.unwrap_or_default();
Self { entries }

View File

@@ -1,7 +1,7 @@
use colored::Colorize;
use crate::config::{Config, RepoEntry};
use crate::error::Result;
use crate::error::{EmpveError, Result};
use crate::paths::Paths;
/// Execute the `add` command - add a repository to the config
@@ -9,8 +9,9 @@ pub fn execute(repo: &str, rev: Option<String>, scripts: Option<Vec<String>>) ->
let paths = Paths::new()?;
let mut config = Config::load(&paths.config_file)?;
// Build the repo entry
let mut entry = RepoEntry::new(repo);
// Build the repo entry with validation
let mut entry = RepoEntry::try_new(repo)
.map_err(EmpveError::InvalidRepo)?;
if let Some(r) = rev {
entry = entry.with_rev(r);
}

View File

@@ -177,7 +177,7 @@ fn parse_selection(input: &str, max: usize) -> Vec<usize> {
let mut result = Vec::new();
// Split by comma or space
for part in input.split(|c| c == ',' || c == ' ') {
for part in input.split([',', ' ']) {
let part = part.trim();
if part.is_empty() {
continue;

View File

@@ -24,12 +24,7 @@ pub fn execute(skip_confirm: bool, target_filter: Option<Vec<String>>) -> Result
let mut orphaned_scripts: Vec<OrphanedScript> = Vec::new();
let targets_to_check: Vec<_> = config
.enabled_targets()
.filter(|t| {
target_filter.as_ref().map_or(true, |filter| {
filter.iter().any(|f| t.name == *f)
})
})
.filtered_targets(target_filter.as_deref())
.collect();
for target in targets_to_check {

View File

@@ -122,7 +122,10 @@ pub fn execute(convert_local: bool, script_filter: Option<String>) -> Result<()>
return Ok(());
}
// Import each repo
// Import each repo, tracking successes and failures
let mut successful_imports: Vec<String> = Vec::new();
let mut failed_imports: Vec<(String, String)> = Vec::new();
for remote in repos_to_import {
let repo_id = extract_repo_identifier(remote);
let scripts_in_repo: Vec<_> = git_repos[remote]
@@ -138,21 +141,55 @@ pub fn execute(convert_local: bool, script_filter: Option<String>) -> Result<()>
entry = entry.with_scripts(scripts_in_repo);
}
if let Err(e) = config.add_repo(entry) {
eprintln!("{}: Failed to add {}: {}", "Warning".yellow(), repo_id, e);
} else {
println!("{} {}", "Added".green().bold(), repo_id.cyan());
match config.add_repo(entry) {
Ok(_) => {
println!("{} {}", "Added".green().bold(), repo_id.cyan());
successful_imports.push(repo_id);
}
Err(e) => {
println!("{} {} - {}", "".red(), repo_id.cyan(), e.to_string().dimmed());
failed_imports.push((repo_id, e.to_string()));
}
}
}
// Only save config if at least one repo was successfully added
if successful_imports.is_empty() {
println!();
println!("{}", "No repositories were imported.".yellow());
if !failed_imports.is_empty() {
println!();
println!("{} {} import(s) failed:", "Summary:".bold(), failed_imports.len());
for (repo, reason) in &failed_imports {
println!(" {} {} - {}", "".red(), repo, reason.dimmed());
}
}
return Ok(());
}
// Save config
paths.ensure_directories()?;
config.save(&paths.config_file)?;
println!();
// Show failure summary if any
if !failed_imports.is_empty() {
println!(
"{} {} import(s) failed:",
"".yellow(),
failed_imports.len()
);
for (repo, reason) in &failed_imports {
println!(" {} {} - {}", "".red(), repo, reason.dimmed());
}
println!();
}
println!(
"{} Run {} to clone the repositories.",
"{} Imported {} repository(ies). Run {} to clone them.",
"Done!".green().bold(),
successful_imports.len().to_string().cyan(),
"empeve install".cyan()
);

View File

@@ -45,12 +45,7 @@ pub fn execute(
// Check if we have any targets configured, filtered by --target flag
let targets: Vec<&TargetConfig> = config
.enabled_targets()
.filter(|t| {
target_filter.as_ref().map_or(true, |filter| {
filter.iter().any(|f| t.name == *f)
})
})
.filtered_targets(target_filter.as_deref())
.collect();
// Proactively ensure all target directories exist
@@ -86,16 +81,10 @@ pub fn execute(
let mut total_repos = 0;
let mut total_scripts = 0;
let mut total_assets = 0;
let mut total_asset_failures = 0;
let mut failed_repos: Vec<(String, String)> = Vec::new();
for entry in config.enabled_repos() {
// Skip if filter is set and this repo isn't in it
if let Some(ref filter) = repos_filter {
if !filter.iter().any(|f| entry.repo.contains(f)) {
continue;
}
}
for entry in config.filtered_repos(repos_filter.as_deref()) {
println!("\n{} {}", "Processing".blue().bold(), entry.repo.cyan());
let mut repo = Repository::from_entry(entry.clone(), &paths);
@@ -141,6 +130,15 @@ pub fn execute(
if let Ok(git_repo) = repo.open() {
let current_commit = GitOps::head_commit(&git_repo).unwrap_or_default();
if current_commit != locked_repo.commit {
// Warn if there are local modifications that will be discarded
if has_local_modifications(&git_repo) {
println!(
" {} Local changes in {} will be discarded by checkout",
"".yellow(),
entry.repo.cyan()
);
}
// Checkout the locked commit
if let Err(e) = checkout_commit(&git_repo, &locked_repo.commit) {
println!(
@@ -225,7 +223,7 @@ pub fn execute(
// Show assets if any were installed
let asset_count = result.script_opts_count + result.fonts_count + result.shaders_count;
if asset_count > 0 {
if asset_count > 0 || result.asset_failures > 0 {
let mut parts = Vec::new();
if result.script_opts_count > 0 {
parts.push(format!("{} conf", result.script_opts_count));
@@ -236,7 +234,13 @@ pub fn execute(
if result.shaders_count > 0 {
parts.push(format!("{} shader", result.shaders_count));
}
print!(" {}", format!("(+{})", parts.join(", ")).dimmed());
if asset_count > 0 {
print!(" {}", format!("(+{})", parts.join(", ")).dimmed());
}
if result.asset_failures > 0 {
print!(" {}", format!("({} asset(s) failed)", result.asset_failures).yellow());
total_asset_failures += result.asset_failures;
}
total_assets += asset_count;
}
@@ -290,6 +294,13 @@ pub fn execute(
print!(" + {} asset(s)", total_assets.to_string().cyan());
}
if total_asset_failures > 0 {
print!(
" ({} asset(s) failed)",
total_asset_failures.to_string().yellow()
);
}
if targets.len() > 1 {
print!(" to {} target(s)", targets.len().to_string().cyan());
}
@@ -326,3 +337,27 @@ fn checkout_commit(repo: &git2::Repository, commit: &str) -> Result<()> {
repo.checkout_head(Some(git2::build::CheckoutBuilder::default().force()))?;
Ok(())
}
/// Check whether a repository carries uncommitted local changes.
///
/// Inspects both the worktree and the index for any kind of modification
/// (new, modified, deleted, renamed, or type-changed entries). If the
/// status query itself fails, the repo is reported as clean so the
/// caller's checkout can still proceed.
fn has_local_modifications(repo: &git2::Repository) -> bool {
    // Union of all status flags we consider "dirty", staged or unstaged.
    let dirty = git2::Status::WT_MODIFIED
        | git2::Status::WT_NEW
        | git2::Status::WT_DELETED
        | git2::Status::WT_RENAMED
        | git2::Status::WT_TYPECHANGE
        | git2::Status::INDEX_MODIFIED
        | git2::Status::INDEX_NEW
        | git2::Status::INDEX_DELETED
        | git2::Status::INDEX_RENAMED
        | git2::Status::INDEX_TYPECHANGE;
    match repo.statuses(None) {
        Ok(statuses) => statuses.iter().any(|entry| entry.status().intersects(dirty)),
        // Best effort: an unreadable status is treated as "no modifications".
        Err(_) => false,
    }
}

View File

@@ -20,12 +20,7 @@ pub fn execute(detailed: bool, target_filter: Option<Vec<String>>) -> Result<()>
let config = Config::load(&paths.config_file)?;
let targets: Vec<_> = config
.enabled_targets()
.filter(|t| {
target_filter.as_ref().map_or(true, |filter| {
filter.iter().any(|f| t.name == *f)
})
})
.filtered_targets(target_filter.as_deref())
.collect();
let has_multiple_targets = targets.len() > 1;

View File

@@ -20,14 +20,7 @@ pub fn execute(repos_filter: Option<Vec<String>>) -> Result<()> {
let mut up_to_date = 0;
let mut errors = 0;
for entry in config.enabled_repos() {
// Skip if filter is set and this repo isn't in it
if let Some(ref filter) = repos_filter {
if !filter.iter().any(|f| entry.repo.contains(f)) {
continue;
}
}
for entry in config.filtered_repos(repos_filter.as_deref()) {
let repo = Repository::from_entry(entry.clone(), &paths);
if !repo.is_cloned {

View File

@@ -149,6 +149,43 @@ fn is_false(value: &bool) -> bool {
!*value
}
/// Validate a repository identifier (GitHub shorthand or full URL)
///
/// For GitHub shorthand (user/repo), requires exactly one `/` separator
/// with non-empty user and repo components.
/// Full URLs (https://, http://, git@) are accepted without additional validation.
/// Local repos (local/...) are also accepted.
///
/// # Errors
///
/// Returns a human-readable message describing why the identifier is invalid.
pub fn validate_repo_identifier(repo: &str) -> std::result::Result<(), String> {
    // Local repos are always valid
    if repo.starts_with("local/") {
        return Ok(());
    }
    // Full URLs are accepted as-is
    if repo.starts_with("https://") || repo.starts_with("http://") || repo.starts_with("git@") {
        return Ok(());
    }
    // GitHub shorthand: split on the first '/'. No separator at all, or a
    // second '/' in the remainder, means it is not plain "user/repo".
    // (Single pass replaces the previous count-then-split, whose
    // `parts.len() != 2` branch was unreachable after the count check.)
    let Some((user, name)) = repo.split_once('/') else {
        return Err(format!(
            "invalid repository format '{}': expected 'user/repo' or a full git URL",
            repo
        ));
    };
    if name.contains('/') {
        return Err(format!(
            "invalid repository format '{}': expected 'user/repo' or a full git URL",
            repo
        ));
    }
    // Check that both parts are non-empty
    if user.is_empty() || name.is_empty() {
        return Err(format!(
            "invalid repository format '{}': expected 'user/repo' format",
            repo
        ));
    }
    Ok(())
}
impl RepoEntry {
/// Create a new repo entry with just the identifier
pub fn new(repo: impl Into<String>) -> Self {
@@ -163,6 +200,15 @@ impl RepoEntry {
}
}
/// Create a new repo entry with validation
///
/// Unlike `new`, the identifier is first checked with
/// `validate_repo_identifier`, which accepts GitHub `user/repo`
/// shorthand, full git URLs (`https://`, `http://`, `git@`), and
/// `local/...` entries.
///
/// # Errors
///
/// Returns a human-readable message if the repo identifier is invalid.
pub fn try_new(repo: impl Into<String>) -> std::result::Result<Self, String> {
    let repo = repo.into();
    // Reject malformed identifiers before constructing the entry.
    validate_repo_identifier(&repo)?;
    Ok(Self::new(repo))
}
/// Set the targets for this repo
pub fn with_targets(mut self, targets: Vec<String>) -> Self {
self.targets = Some(targets);
@@ -301,6 +347,30 @@ impl Config {
pub fn has_targets(&self) -> bool {
!self.targets.is_empty()
}
/// Get enabled targets, optionally filtered by name.
///
/// With `filter == None`, every enabled target is yielded; with
/// `filter == Some(names)`, only enabled targets whose name appears in
/// `names` (exact match) are yielded.
pub fn filtered_targets<'a>(
    &'a self,
    filter: Option<&'a [String]>,
) -> impl Iterator<Item = &'a TargetConfig> {
    self.enabled_targets().filter(move |target| match filter {
        None => true,
        Some(names) => names.contains(&target.name),
    })
}
/// Get enabled repos, optionally filtered by identifier substring match.
///
/// With `filter == None`, every enabled repo is yielded; with
/// `filter == Some(patterns)`, a repo is yielded when any pattern is a
/// substring of its identifier.
pub fn filtered_repos<'a>(
    &'a self,
    filter: Option<&'a [String]>,
) -> impl Iterator<Item = &'a RepoEntry> {
    self.enabled_repos().filter(move |entry| match filter {
        None => true,
        Some(patterns) => patterns.iter().any(|p| entry.repo.contains(p)),
    })
}
}
#[cfg(test)]
@@ -348,4 +418,107 @@ mod tests {
assert_eq!(config.repos[0].repo, parsed.repos[0].repo);
assert_eq!(config.repos[0].rev, parsed.repos[0].rev);
}
#[test]
fn test_validate_repo_identifier_github_shorthand() {
// Valid shorthand
assert!(validate_repo_identifier("user/repo").is_ok());
assert!(validate_repo_identifier("org-name/repo_name").is_ok());
// Invalid: no slash
assert!(validate_repo_identifier("just_repo").is_err());
// Invalid: too many slashes
assert!(validate_repo_identifier("user/repo/extra").is_err());
// Invalid: empty parts
assert!(validate_repo_identifier("/repo").is_err());
assert!(validate_repo_identifier("user/").is_err());
}
#[test]
fn test_validate_repo_identifier_full_urls() {
// Full URLs are always valid
assert!(validate_repo_identifier("https://github.com/user/repo.git").is_ok());
assert!(validate_repo_identifier("http://gitlab.com/user/repo").is_ok());
assert!(validate_repo_identifier("git@github.com:user/repo.git").is_ok());
}
#[test]
fn test_validate_repo_identifier_local() {
// Local repos are always valid
assert!(validate_repo_identifier("local/my-script").is_ok());
assert!(validate_repo_identifier("local/anything").is_ok());
}
#[test]
fn test_repo_entry_try_new_validates() {
// Valid
assert!(RepoEntry::try_new("user/repo").is_ok());
assert!(RepoEntry::try_new("https://github.com/user/repo").is_ok());
// Invalid
assert!(RepoEntry::try_new("invalid").is_err());
assert!(RepoEntry::try_new("user/repo/extra").is_err());
}
#[test]
fn test_filtered_targets_no_filter() {
let mut config = Config::default();
config.add_target(TargetConfig::new("mpv", "/path/to/mpv"));
config.add_target(TargetConfig::new("jellyfin", "/path/to/jellyfin"));
let targets: Vec<_> = config.filtered_targets(None).collect();
assert_eq!(targets.len(), 2);
}
#[test]
fn test_filtered_targets_with_filter() {
let mut config = Config::default();
config.add_target(TargetConfig::new("mpv", "/path/to/mpv"));
config.add_target(TargetConfig::new("jellyfin", "/path/to/jellyfin"));
let filter = vec!["mpv".to_string()];
let targets: Vec<_> = config.filtered_targets(Some(&filter)).collect();
assert_eq!(targets.len(), 1);
assert_eq!(targets[0].name, "mpv");
}
#[test]
fn test_filtered_repos_no_filter() {
let mut config = Config::default();
config.add_repo(RepoEntry::new("user/repo1")).unwrap();
config.add_repo(RepoEntry::new("user/repo2")).unwrap();
config.add_repo(RepoEntry::new("other/repo")).unwrap();
let repos: Vec<_> = config.filtered_repos(None).collect();
assert_eq!(repos.len(), 3);
}
#[test]
fn test_filtered_repos_with_filter() {
let mut config = Config::default();
config.add_repo(RepoEntry::new("user/repo1")).unwrap();
config.add_repo(RepoEntry::new("user/repo2")).unwrap();
config.add_repo(RepoEntry::new("other/repo")).unwrap();
// Filter by substring match
let filter = vec!["user".to_string()];
let repos: Vec<_> = config.filtered_repos(Some(&filter)).collect();
assert_eq!(repos.len(), 2);
}
#[test]
fn test_filtered_repos_excludes_disabled() {
let mut config = Config::default();
config.add_repo(RepoEntry::new("user/repo1")).unwrap();
let mut disabled_entry = RepoEntry::new("user/disabled");
disabled_entry.disabled = true;
config.repos.push(disabled_entry);
let repos: Vec<_> = config.filtered_repos(None).collect();
assert_eq!(repos.len(), 1);
assert_eq!(repos[0].repo, "user/repo1");
}
}

View File

@@ -11,7 +11,7 @@ use crate::error::Result;
pub struct Lockfile {
/// Version of lockfile format
pub version: u32,
/// Timestamp when lockfile was created (RFC 3339 format)
/// Timestamp when lockfile was created (Unix epoch seconds as string)
pub created_at: String,
/// Locked repository states
pub repos: HashMap<String, LockedRepo>,
@@ -79,3 +79,128 @@ impl Lockfile {
format!("{}", duration.as_secs())
}
}
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
#[test]
fn test_new_lockfile_is_empty() {
let lockfile = Lockfile::new();
assert_eq!(lockfile.version, 1);
assert!(lockfile.is_empty());
assert!(lockfile.repos.is_empty());
// created_at should be a valid timestamp (numeric string)
assert!(lockfile.created_at.parse::<u64>().is_ok());
}
#[test]
fn test_lock_and_get_repo() {
let mut lockfile = Lockfile::new();
let locked = LockedRepo {
commit: "abc123def456789012345678901234567890abcd".to_string(),
source: "https://github.com/user/repo.git".to_string(),
rev: Some("main".to_string()),
};
lockfile.lock_repo("user/repo", locked.clone());
assert!(!lockfile.is_empty());
let retrieved = lockfile.get_locked("user/repo");
assert!(retrieved.is_some());
let retrieved = retrieved.unwrap();
assert_eq!(retrieved.commit, locked.commit);
assert_eq!(retrieved.source, locked.source);
assert_eq!(retrieved.rev, locked.rev);
}
#[test]
fn test_get_locked_returns_none_for_missing() {
let lockfile = Lockfile::new();
assert!(lockfile.get_locked("nonexistent/repo").is_none());
}
#[test]
fn test_save_and_load_roundtrip() {
let temp = TempDir::new().unwrap();
let lockfile_path = temp.path().join("empeve.lock");
let mut lockfile = Lockfile::new();
lockfile.lock_repo(
"user/repo1",
LockedRepo {
commit: "1111111111111111111111111111111111111111".to_string(),
source: "https://github.com/user/repo1.git".to_string(),
rev: None,
},
);
lockfile.lock_repo(
"other/repo2",
LockedRepo {
commit: "2222222222222222222222222222222222222222".to_string(),
source: "https://github.com/other/repo2.git".to_string(),
rev: Some("v1.0.0".to_string()),
},
);
// Save
lockfile.save(&lockfile_path).unwrap();
assert!(lockfile_path.exists());
// Load
let loaded = Lockfile::load(&lockfile_path).unwrap();
assert_eq!(loaded.version, lockfile.version);
assert_eq!(loaded.repos.len(), 2);
let repo1 = loaded.get_locked("user/repo1").unwrap();
assert_eq!(repo1.commit, "1111111111111111111111111111111111111111");
assert!(repo1.rev.is_none());
let repo2 = loaded.get_locked("other/repo2").unwrap();
assert_eq!(repo2.commit, "2222222222222222222222222222222222222222");
assert_eq!(repo2.rev, Some("v1.0.0".to_string()));
}
#[test]
fn test_load_missing_file_returns_default() {
let temp = TempDir::new().unwrap();
let lockfile_path = temp.path().join("nonexistent.lock");
let lockfile = Lockfile::load(&lockfile_path).unwrap();
assert!(lockfile.is_empty());
}
#[test]
fn test_lock_repo_updates_existing() {
let mut lockfile = Lockfile::new();
lockfile.lock_repo(
"user/repo",
LockedRepo {
commit: "old_commit".to_string(),
source: "source".to_string(),
rev: None,
},
);
lockfile.lock_repo(
"user/repo",
LockedRepo {
commit: "new_commit".to_string(),
source: "source".to_string(),
rev: Some("main".to_string()),
},
);
assert_eq!(lockfile.repos.len(), 1);
let repo = lockfile.get_locked("user/repo").unwrap();
assert_eq!(repo.commit, "new_commit");
assert_eq!(repo.rev, Some("main".to_string()));
}
}

View File

@@ -150,22 +150,57 @@ fn check_first_run(command: &commands::Commands) -> Result<()> {
selected_indices
};
let mut success_count = 0;
let mut failed_targets: Vec<String> = Vec::new();
for i in indices_to_use {
if let Some(target) = detected.get(i) {
let target_config = target.to_target_config();
if let Err(e) = target_config.ensure_directories() {
eprintln!(" {}: Could not create directories for {}: {}", "Warning".yellow(), target.name, e);
eprintln!(
" {} {} - could not create directories: {}",
"".red(),
target.name.cyan(),
e.to_string().dimmed()
);
failed_targets.push(target.name.clone());
continue;
}
config.add_target(target_config);
println!(" {} {}", "".green(), target.name.cyan());
success_count += 1;
}
}
// Fail if no targets could be configured
if success_count == 0 {
eprintln!();
eprintln!(
"{} Could not configure any targets. Check directory permissions.",
"Error:".red().bold()
);
return Err(empeve::error::EmpveError::Config(
"no targets could be configured".to_string(),
));
}
// Save config
paths.ensure_directories()?;
config.save(&paths.config_file)?;
println!();
// Show warning if some targets failed
if !failed_targets.is_empty() {
println!(
"{} {} target(s) could not be configured: {}",
"".yellow(),
failed_targets.len(),
failed_targets.join(", ").dimmed()
);
println!();
}
println!(
"{} Configuration saved. Run {} to browse and add scripts.",
"Done!".green().bold(),
@@ -180,7 +215,7 @@ fn check_first_run(command: &commands::Commands) -> Result<()> {
fn parse_target_selection(input: &str, max: usize) -> Vec<usize> {
let mut result = Vec::new();
for part in input.split(|c| c == ',' || c == ' ') {
for part in input.split([',', ' ']) {
let part = part.trim();
if part.is_empty() {
continue;

View File

@@ -1,4 +1,4 @@
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use crate::config::TargetConfig;
use crate::error::{EmpveError, Result};
@@ -93,8 +93,7 @@ fn sanitize_repo_name(identifier: &str) -> String {
.replace("github.com/", "")
.replace("github.com:", "")
.replace(".git", "")
.replace('/', "_")
.replace(':', "_")
.replace(['/', ':'], "_")
}
/// Known mpv configuration locations to auto-detect
@@ -174,7 +173,7 @@ pub fn detect_mpv_configs() -> Vec<DetectedTarget> {
}
/// Check if a mpv config folder has a scripts directory with scripts
fn check_scripts_dir(mpv_path: &PathBuf) -> (bool, usize) {
fn check_scripts_dir(mpv_path: &Path) -> (bool, usize) {
let scripts_dir = mpv_path.join("scripts");
if !scripts_dir.exists() || !scripts_dir.is_dir() {
return (false, 0);

View File

@@ -139,10 +139,10 @@ impl ScriptDiscovery {
if let Ok(entries) = std::fs::read_dir(&script_dir) {
for entry in entries.filter_map(|e| e.ok()) {
let path = entry.path();
if path.extension().map(|e| e == "conf").unwrap_or(false) {
if !assets.script_opts.contains(&path) {
assets.script_opts.push(path);
}
if path.extension().map(|e| e == "conf").unwrap_or(false)
&& !assets.script_opts.contains(&path)
{
assets.script_opts.push(path);
}
}
}
@@ -176,13 +176,12 @@ impl ScriptDiscovery {
let script_lower = script_name.to_lowercase();
// Include if exact match OR prefix match
if name_lower == script_lower
if (name_lower == script_lower
|| name_lower.starts_with(&format!("{}_", script_lower))
|| name_lower.starts_with(&format!("{}.", script_lower))
|| name_lower.starts_with(&format!("{}.", script_lower)))
&& !dest.contains(&path)
{
if !dest.contains(&path) {
dest.push(path);
}
dest.push(path);
}
}
}
@@ -197,10 +196,10 @@ impl ScriptDiscovery {
for entry in entries.filter_map(|e| e.ok()) {
let path = entry.path();
if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
if matches!(ext.to_lowercase().as_str(), "ttf" | "otf" | "woff" | "woff2") {
if !dest.contains(&path) {
dest.push(path);
}
if matches!(ext.to_lowercase().as_str(), "ttf" | "otf" | "woff" | "woff2")
&& !dest.contains(&path)
{
dest.push(path);
}
}
}
@@ -213,10 +212,10 @@ impl ScriptDiscovery {
for entry in entries.filter_map(|e| e.ok()) {
let path = entry.path();
if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
if matches!(ext.to_lowercase().as_str(), "glsl" | "hook") {
if !dest.contains(&path) {
dest.push(path);
}
if matches!(ext.to_lowercase().as_str(), "glsl" | "hook")
&& !dest.contains(&path)
{
dest.push(path);
}
}
}

View File

@@ -3,7 +3,10 @@ use std::path::Path;
use crate::error::Result;
/// Classification of a revision string
/// Classification of a revision string for determining update behavior.
///
/// This enum determines whether a repository should auto-update or remain pinned
/// at a specific version.
#[derive(Debug, Clone, PartialEq)]
pub enum RevType {
/// A specific commit hash (40-char hex) - pinned, never auto-updates
@@ -17,7 +20,27 @@ pub enum RevType {
}
impl RevType {
/// Parse a revision string into a RevType
/// Parse a revision string into a RevType using heuristics.
///
/// # Detection heuristics
///
/// The function uses the following rules to classify revision strings:
///
/// 1. **Commit hash**: Exactly 40 hexadecimal characters (full SHA-1 hash).
/// Short hashes are NOT detected as commits to avoid ambiguity with branch names.
///
/// 2. **Tag (semver)**: Starts with 'v' followed by a digit (e.g., `v1.2.3`, `v2.0`).
/// This pattern follows the common GitHub release tag convention.
///
/// 3. **Branch**: Any other string that doesn't match the above patterns.
///
/// # Limitations
///
/// - Tags without the 'v' prefix (e.g., `1.2.3`, `release-2024`) are treated as branches.
/// - Short commit hashes (e.g., `abc1234`) are treated as branches.
/// - Tags that don't follow semver-like patterns (e.g., `stable`, `latest`) are treated as branches.
///
/// If you need exact pinning behavior, use a full 40-character commit hash.
pub fn from_rev(rev: Option<&str>) -> Self {
match rev {
None => RevType::Default,
@@ -27,7 +50,7 @@ impl RevType {
return RevType::Commit(r.to_string());
}
// Semver pattern (v followed by digit) = tag
if r.starts_with('v') && r.chars().nth(1).map_or(false, |c| c.is_ascii_digit()) {
if r.starts_with('v') && r.chars().nth(1).is_some_and(|c| c.is_ascii_digit()) {
return RevType::Tag(r.to_string());
}
// Otherwise assume branch
@@ -164,7 +187,7 @@ impl GitOps {
return false;
}
// Semver pattern (v followed by digit) = tag
if rev.starts_with('v') && rev.chars().nth(1).map_or(false, |c| c.is_ascii_digit()) {
if rev.starts_with('v') && rev.chars().nth(1).is_some_and(|c| c.is_ascii_digit()) {
return false;
}
// Otherwise assume branch
@@ -293,10 +316,129 @@ impl GitOps {
#[cfg(test)]
mod tests {
    //! Unit tests for repository detection, commit inspection, and the
    //! `RevType` classification heuristics.
    use super::*;
    use tempfile::TempDir;

    /// A brand-new empty directory must not be reported as a git repo.
    #[test]
    fn test_is_repo() {
        // Use a fresh TempDir instead of the hard-coded "/tmp": "/tmp" does
        // not exist on Windows, and a fixed path makes the test depend on
        // machine state. A newly created temp dir is guaranteed to contain
        // no repository.
        let temp = TempDir::new().unwrap();
        assert!(!GitOps::is_repo(temp.path()));
    }

    /// `init` must create a repository that `is_repo` recognizes.
    #[test]
    fn test_init_creates_valid_repo() {
        let temp = TempDir::new().unwrap();
        let repo = GitOps::init(temp.path()).unwrap();
        // repo.path() points at the .git directory, so its parent is the
        // work tree we just initialized.
        assert_eq!(repo.path().parent().unwrap(), temp.path());
        // Should be recognized as a repo
        assert!(GitOps::is_repo(temp.path()));
    }

    /// The short form of HEAD is a 7-character hex abbreviation.
    #[test]
    fn test_head_commit_returns_short_hash() {
        let temp = TempDir::new().unwrap();
        let repo = GitOps::init(temp.path()).unwrap();
        // Create a file and commit so HEAD exists
        let file_path = temp.path().join("test.txt");
        std::fs::write(&file_path, "test content").unwrap();
        GitOps::add_all_and_commit(&repo, "Initial commit").unwrap();
        // Get short commit hash
        let short_hash = GitOps::head_commit_short(&repo).unwrap();
        assert_eq!(short_hash.len(), 7);
        assert!(short_hash.chars().all(|c| c.is_ascii_hexdigit()));
    }

    /// The full form of HEAD is a 40-character hex SHA-1.
    #[test]
    fn test_head_commit_returns_full_hash() {
        let temp = TempDir::new().unwrap();
        let repo = GitOps::init(temp.path()).unwrap();
        // Create a file and commit so HEAD exists
        let file_path = temp.path().join("test.txt");
        std::fs::write(&file_path, "test content").unwrap();
        GitOps::add_all_and_commit(&repo, "Initial commit").unwrap();
        // Get full commit hash
        let full_hash = GitOps::head_commit(&repo).unwrap();
        assert_eq!(full_hash.len(), 40);
        assert!(full_hash.chars().all(|c| c.is_ascii_hexdigit()));
    }

    /// A full 40-char hex string is classified as a pinned commit.
    #[test]
    fn test_rev_type_commit_detection() {
        // Full 40-char hex is a commit
        let commit = RevType::from_rev(Some("abc123def456789012345678901234567890abcd"));
        assert!(matches!(commit, RevType::Commit(_)));
        assert!(commit.is_pinned());
        assert!(!commit.is_tracking());
    }

    /// `v` followed by a digit is classified as a (pinned) tag.
    #[test]
    fn test_rev_type_tag_detection() {
        // v followed by digit is a tag
        assert!(matches!(RevType::from_rev(Some("v1.2.3")), RevType::Tag(_)));
        assert!(matches!(RevType::from_rev(Some("v2")), RevType::Tag(_)));
        assert!(matches!(RevType::from_rev(Some("v0.0.1")), RevType::Tag(_)));
        // Tags are pinned
        let tag = RevType::from_rev(Some("v1.0.0"));
        assert!(tag.is_pinned());
        assert!(!tag.is_tracking());
    }

    /// Anything that is neither a full hash nor a v-prefixed version is a branch.
    #[test]
    fn test_rev_type_branch_detection() {
        // Other strings are branches
        assert!(matches!(RevType::from_rev(Some("main")), RevType::Branch(_)));
        assert!(matches!(RevType::from_rev(Some("develop")), RevType::Branch(_)));
        assert!(matches!(RevType::from_rev(Some("feature/foo")), RevType::Branch(_)));
        // Branches track
        let branch = RevType::from_rev(Some("main"));
        assert!(!branch.is_pinned());
        assert!(branch.is_tracking());
    }

    /// `None` maps to the tracking `Default` variant.
    #[test]
    fn test_rev_type_default() {
        let default = RevType::from_rev(None);
        assert!(matches!(default, RevType::Default));
        assert!(!default.is_pinned());
        assert!(default.is_tracking());
    }

    /// Documented heuristic limitations: short hashes, bare versions, and
    /// non-semver `v…` strings all fall through to `Branch`.
    #[test]
    fn test_rev_type_edge_cases() {
        // Short hash is treated as branch (not commit)
        assert!(matches!(RevType::from_rev(Some("abc1234")), RevType::Branch(_)));
        // Version without v prefix is treated as branch
        assert!(matches!(RevType::from_rev(Some("1.2.3")), RevType::Branch(_)));
        // v without digit is treated as branch
        assert!(matches!(RevType::from_rev(Some("version-1")), RevType::Branch(_)));
    }

    /// `is_pinned` and `is_tracking` must be mutually exclusive across variants.
    #[test]
    fn test_is_pinned_and_is_tracking() {
        // Commits and tags are pinned
        assert!(RevType::Commit("abc".to_string()).is_pinned());
        assert!(RevType::Tag("v1.0".to_string()).is_pinned());
        // Branches and default are tracking
        assert!(RevType::Branch("main".to_string()).is_tracking());
        assert!(RevType::Default.is_tracking());
        // Pinned is not tracking
        assert!(!RevType::Commit("abc".to_string()).is_tracking());
        assert!(!RevType::Tag("v1.0".to_string()).is_tracking());
        // Tracking is not pinned
        assert!(!RevType::Branch("main".to_string()).is_pinned());
        assert!(!RevType::Default.is_pinned());
    }
}

View File

@@ -5,6 +5,31 @@ use crate::repo::discovery::{DiscoveredScript, ScriptAssets};
use super::InstalledScript;
/// Outcome summary for installing a set of files into one directory.
#[derive(Debug, Default)]
pub struct AssetInstallResult {
    /// Files that were written successfully.
    pub success_count: usize,
    /// Files that could not be installed.
    pub failure_count: usize,
    /// Files intentionally left untouched (e.g. an existing config was kept).
    pub skipped_count: usize,
}
impl AssetInstallResult {
    /// True when at least one file failed to install.
    pub fn has_failures(&self) -> bool {
        self.failure_count != 0
    }
    /// How many files were attempted overall (success + failure + skipped).
    pub fn total(&self) -> usize {
        [self.success_count, self.failure_count, self.skipped_count]
            .iter()
            .copied()
            .sum()
    }
}
/// Result of installing a script with its assets
#[derive(Debug)]
pub struct InstallResult {
@@ -19,6 +44,9 @@ pub struct InstallResult {
/// Number of shaders installed
pub shaders_count: usize,
/// Number of asset installation failures
pub asset_failures: usize,
}
/// Handles installing/uninstalling scripts to mpv's scripts directory
@@ -96,73 +124,80 @@ impl ScriptInstaller {
}
// Install associated assets
let (script_opts_count, fonts_count, shaders_count) = self.install_assets(&script.assets)?;
let (script_opts_count, fonts_count, shaders_count, asset_failures) = self.install_assets(&script.assets)?;
Ok(InstallResult {
script_path: destination,
script_opts_count,
fonts_count,
shaders_count,
asset_failures,
})
}
/// Install assets (script-opts, fonts, shaders)
fn install_assets(&self, assets: &ScriptAssets) -> Result<(usize, usize, usize)> {
fn install_assets(&self, assets: &ScriptAssets) -> Result<(usize, usize, usize, usize)> {
let mut script_opts_count = 0;
let mut fonts_count = 0;
let mut shaders_count = 0;
let mut total_failures = 0;
// Install script-opts
// Install script-opts (don't overwrite existing configs)
if !assets.script_opts.is_empty() {
if let Some(count) = self.try_install_files(&assets.script_opts, &self.mpv_script_opts_dir, false) {
script_opts_count = count;
}
let result = self.try_install_files(&assets.script_opts, &self.mpv_script_opts_dir, false);
script_opts_count = result.success_count;
total_failures += result.failure_count;
}
// Install fonts
// Install fonts (overwrite existing)
if !assets.fonts.is_empty() {
if let Some(count) = self.try_install_files(&assets.fonts, &self.mpv_fonts_dir, true) {
fonts_count = count;
}
let result = self.try_install_files(&assets.fonts, &self.mpv_fonts_dir, true);
fonts_count = result.success_count;
total_failures += result.failure_count;
}
// Install shaders
// Install shaders (overwrite existing)
if !assets.shaders.is_empty() {
if let Some(count) = self.try_install_files(&assets.shaders, &self.mpv_shaders_dir, true) {
shaders_count = count;
}
let result = self.try_install_files(&assets.shaders, &self.mpv_shaders_dir, true);
shaders_count = result.success_count;
total_failures += result.failure_count;
}
Ok((script_opts_count, fonts_count, shaders_count))
Ok((script_opts_count, fonts_count, shaders_count, total_failures))
}
/// Try to install files to a directory, returns None if directory is not writable
fn try_install_files(&self, sources: &[PathBuf], dest_dir: &Path, overwrite: bool) -> Option<usize> {
/// Try to install files to a directory, returns detailed result
fn try_install_files(&self, sources: &[PathBuf], dest_dir: &Path, overwrite: bool) -> AssetInstallResult {
let mut result = AssetInstallResult::default();
// Check if dest_dir is a symlink to somewhere else (likely system dir)
if dest_dir.is_symlink() {
// Can't install to a symlinked directory (likely /usr/share/...)
return None;
result.failure_count = sources.len();
return result;
}
// Try to create the directory
if std::fs::create_dir_all(dest_dir).is_err() {
return None;
result.failure_count = sources.len();
return result;
}
// Check if directory is writable
let test_file = dest_dir.join(".empeve-test");
if std::fs::write(&test_file, "").is_err() {
return None;
result.failure_count = sources.len();
return result;
}
let _ = std::fs::remove_file(&test_file);
let mut count = 0;
for src in sources {
if let Some(filename) = src.file_name() {
let dest = dest_dir.join(filename);
// Skip if exists and we shouldn't overwrite (for configs)
if !overwrite && dest.exists() {
result.skipped_count += 1;
continue;
}
@@ -172,19 +207,21 @@ impl ScriptInstaller {
}
// Install the file
let result = if self.use_symlinks {
let install_result = if self.use_symlinks {
self.create_symlink(src, &dest)
} else {
std::fs::copy(src, &dest).map(|_| ()).map_err(|e| e.into())
};
if result.is_ok() {
count += 1;
if install_result.is_ok() {
result.success_count += 1;
} else {
result.failure_count += 1;
}
}
}
Some(count)
result
}
/// Uninstall a script by path
@@ -264,7 +301,7 @@ impl ScriptInstaller {
/// Copy a script (file or directory)
fn copy_script(&self, source: &Path, destination: &Path, is_dir: bool) -> Result<()> {
if is_dir {
self.copy_dir_recursive(source, destination)?;
Self::copy_dir_recursive(source, destination)?;
} else {
std::fs::copy(source, destination)?;
}
@@ -272,7 +309,7 @@ impl ScriptInstaller {
}
/// Recursively copy a directory
fn copy_dir_recursive(&self, source: &Path, destination: &Path) -> Result<()> {
fn copy_dir_recursive(source: &Path, destination: &Path) -> Result<()> {
std::fs::create_dir_all(destination)?;
for entry in std::fs::read_dir(source)? {
@@ -281,7 +318,7 @@ impl ScriptInstaller {
let dest_path = destination.join(entry.file_name());
if source_path.is_dir() {
self.copy_dir_recursive(&source_path, &dest_path)?;
Self::copy_dir_recursive(&source_path, &dest_path)?;
} else {
std::fs::copy(&source_path, &dest_path)?;
}
@@ -303,3 +340,196 @@ impl ScriptInstaller {
Ok(())
}
}
#[cfg(test)]
mod tests {
    //! Tests for script installation: orphan (broken-symlink) detection,
    //! copy vs. symlink install modes, and asset-install bookkeeping.
    //!
    //! NOTE(review): creating symlinks on Windows requires Developer Mode or
    //! elevated privileges — the symlink-based tests below may need gating
    //! on Windows CI; confirm.
    use super::*;
    use tempfile::TempDir;
    /// Create two independent temp dirs: one acting as the mpv scripts dir,
    /// one acting as the managed repos dir.
    fn setup_test_dirs() -> (TempDir, TempDir) {
        let scripts_dir = TempDir::new().unwrap();
        let repos_dir = TempDir::new().unwrap();
        (scripts_dir, repos_dir)
    }
    /// Build a ScriptInstaller whose asset dirs (script-opts, fonts, shaders)
    /// all live directly under `scripts_dir`.
    fn create_installer(
        scripts_dir: &Path,
        repos_dir: &Path,
        use_symlinks: bool,
    ) -> ScriptInstaller {
        ScriptInstaller::new(
            scripts_dir.to_path_buf(),
            scripts_dir.join("script-opts"),
            scripts_dir.join("fonts"),
            scripts_dir.join("shaders"),
            repos_dir.to_path_buf(),
            use_symlinks,
        )
    }
    /// A symlink whose target (inside repos_dir) no longer exists must be
    /// reported as orphaned.
    #[test]
    fn test_find_orphaned_detects_broken_symlinks() {
        let (scripts_temp, repos_temp) = setup_test_dirs();
        let scripts_dir = scripts_temp.path();
        let repos_dir = repos_temp.path();
        // Create a symlink pointing to a non-existent file in repos dir
        let fake_target = repos_dir.join("fake_repo").join("script.lua");
        let symlink_path = scripts_dir.join("broken.lua");
        // Create the target directory structure (but not the file)
        std::fs::create_dir_all(repos_dir.join("fake_repo")).unwrap();
        // Create symlink to non-existent target
        #[cfg(unix)]
        std::os::unix::fs::symlink(&fake_target, &symlink_path).unwrap();
        #[cfg(windows)]
        std::os::windows::fs::symlink_file(&fake_target, &symlink_path).unwrap();
        let installer = create_installer(scripts_dir, repos_dir, true);
        let orphaned = installer.find_orphaned().unwrap();
        // Exactly the broken link is reported
        assert_eq!(orphaned.len(), 1);
        assert_eq!(orphaned[0], symlink_path);
    }
    /// A symlink whose target exists must NOT be reported as orphaned.
    #[test]
    fn test_find_orphaned_ignores_valid_symlinks() {
        let (scripts_temp, repos_temp) = setup_test_dirs();
        let scripts_dir = scripts_temp.path();
        let repos_dir = repos_temp.path();
        // Create a valid target file
        let target_dir = repos_dir.join("valid_repo");
        std::fs::create_dir_all(&target_dir).unwrap();
        let target_file = target_dir.join("script.lua");
        std::fs::write(&target_file, "-- lua script").unwrap();
        // Create symlink to valid target
        let symlink_path = scripts_dir.join("valid.lua");
        #[cfg(unix)]
        std::os::unix::fs::symlink(&target_file, &symlink_path).unwrap();
        #[cfg(windows)]
        std::os::windows::fs::symlink_file(&target_file, &symlink_path).unwrap();
        let installer = create_installer(scripts_dir, repos_dir, true);
        let orphaned = installer.find_orphaned().unwrap();
        assert!(orphaned.is_empty());
    }
    /// Symlinks pointing outside repos_dir are not ours to manage and must
    /// never be flagged as orphaned (boundary/safety check).
    #[test]
    fn test_find_orphaned_ignores_symlinks_outside_repos_dir() {
        let (scripts_temp, repos_temp) = setup_test_dirs();
        let scripts_dir = scripts_temp.path();
        let repos_dir = repos_temp.path();
        let external_temp = TempDir::new().unwrap();
        // Create a symlink pointing outside the repos directory
        let external_target = external_temp.path().join("external.lua");
        std::fs::write(&external_target, "-- external script").unwrap();
        let symlink_path = scripts_dir.join("external.lua");
        #[cfg(unix)]
        std::os::unix::fs::symlink(&external_target, &symlink_path).unwrap();
        #[cfg(windows)]
        std::os::windows::fs::symlink_file(&external_target, &symlink_path).unwrap();
        let installer = create_installer(scripts_dir, repos_dir, true);
        let orphaned = installer.find_orphaned().unwrap();
        // Should not detect as orphaned since it's outside repos_dir
        assert!(orphaned.is_empty());
    }
    /// With use_symlinks = false, install must produce a real file copy whose
    /// contents match the source.
    #[test]
    fn test_install_copy_mode_works() {
        use crate::repo::discovery::{DiscoveredScript, ScriptAssets, ScriptType};
        let (scripts_temp, repos_temp) = setup_test_dirs();
        let scripts_dir = scripts_temp.path();
        let repos_dir = repos_temp.path();
        // Create a script file in the repos directory
        let script_dir = repos_dir.join("test_repo");
        std::fs::create_dir_all(&script_dir).unwrap();
        let script_file = script_dir.join("test.lua");
        std::fs::write(&script_file, "-- test lua script").unwrap();
        // Create installer in copy mode (not symlink)
        let installer = create_installer(scripts_dir, repos_dir, false);
        let script = DiscoveredScript {
            name: "test".to_string(),
            script_type: ScriptType::Lua,
            is_multi_file: false,
            repo_path: PathBuf::from("test.lua"),
            absolute_path: script_file.clone(),
            assets: ScriptAssets::default(),
        };
        let result = installer.install(&script, None).unwrap();
        // Verify the file was copied (not symlinked)
        assert!(result.script_path.exists());
        assert!(!result.script_path.is_symlink());
        // Verify content matches
        let content = std::fs::read_to_string(&result.script_path).unwrap();
        assert_eq!(content, "-- test lua script");
    }
    /// has_failures()/total() must reflect the three counters accurately.
    #[test]
    fn test_asset_install_result_tracking() {
        let result = AssetInstallResult {
            success_count: 3,
            failure_count: 1,
            skipped_count: 2,
        };
        assert!(result.has_failures());
        assert_eq!(result.total(), 6);
        let no_failures = AssetInstallResult {
            success_count: 5,
            failure_count: 0,
            skipped_count: 0,
        };
        assert!(!no_failures.has_failures());
    }
    /// Security invariant: a symlink created by install() must resolve to a
    /// path inside the managed repos_dir.
    #[test]
    fn test_symlink_target_stays_within_repos_dir() {
        use crate::repo::discovery::{DiscoveredScript, ScriptAssets, ScriptType};
        let (scripts_temp, repos_temp) = setup_test_dirs();
        let scripts_dir = scripts_temp.path();
        let repos_dir = repos_temp.path();
        // Create a script file in the repos directory
        let script_dir = repos_dir.join("test_repo");
        std::fs::create_dir_all(&script_dir).unwrap();
        let script_file = script_dir.join("good_script.lua");
        std::fs::write(&script_file, "-- good script").unwrap();
        let installer = create_installer(scripts_dir, repos_dir, true);
        let script = DiscoveredScript {
            name: "good_script".to_string(),
            script_type: ScriptType::Lua,
            is_multi_file: false,
            repo_path: PathBuf::from("good_script.lua"),
            absolute_path: script_file.clone(),
            assets: ScriptAssets::default(),
        };
        let result = installer.install(&script, None).unwrap();
        // Verify it's a symlink
        assert!(result.script_path.is_symlink());
        // Verify the symlink target is within repos_dir
        let target = std::fs::read_link(&result.script_path).unwrap();
        assert!(target.starts_with(repos_dir));
    }
}

View File

@@ -1,5 +1,5 @@
pub mod installer;
pub mod script;
pub mod types;
pub use installer::ScriptInstaller;
pub use script::{InstalledScript, ScriptInfo};
pub use types::{InstalledScript, ScriptInfo};

199
tests/cli_integration.rs Normal file
View File

@@ -0,0 +1,199 @@
//! CLI integration tests for empeve
//!
//! These tests exercise the binary's command-line interface to ensure
//! proper user-facing behavior.
//!
//! NOTE(review): the config-dependent tests point the binary at a temp
//! config via `XDG_CONFIG_HOME`. That variable is only honored on
//! XDG-path platforms (Linux/BSD) — confirm whether these tests need
//! `#[cfg(unix)]` gating for macOS/Windows CI.
use assert_cmd::Command;
use predicates::prelude::*;
use tempfile::TempDir;
/// Get a command instance for empeve
// NOTE(review): `#[allow(deprecated)]` suggests `Command::cargo_bin` is
// deprecated in the assert_cmd version in use — confirm and migrate to the
// recommended replacement when convenient.
#[allow(deprecated)]
fn empeve() -> Command {
    Command::cargo_bin("empeve").unwrap()
}
/// `--help` succeeds and prints the top-level description.
#[test]
fn test_cli_help() {
    empeve()
        .arg("--help")
        .assert()
        .success()
        .stdout(predicate::str::contains("Plugin manager for mpv"));
}
/// `--version` succeeds and mentions the binary name.
#[test]
fn test_cli_version() {
    empeve()
        .arg("--version")
        .assert()
        .success()
        .stdout(predicate::str::contains("empeve"));
}
/// Subcommand help: `add`.
#[test]
fn test_cli_help_add() {
    empeve()
        .args(["add", "--help"])
        .assert()
        .success()
        .stdout(predicate::str::contains("Add a repository"));
}
/// Subcommand help: `install`.
#[test]
fn test_cli_help_install() {
    empeve()
        .args(["install", "--help"])
        .assert()
        .success()
        .stdout(predicate::str::contains("Install all configured repositories"));
}
/// Subcommand help: `update`.
#[test]
fn test_cli_help_update() {
    empeve()
        .args(["update", "--help"])
        .assert()
        .success()
        .stdout(predicate::str::contains("Update all repositories"));
}
/// Test that the add command rejects invalid repo formats
#[test]
fn test_cli_add_invalid_repo_format() {
    let temp = TempDir::new().unwrap();
    let config_dir = temp.path().join(".config").join("empeve");
    std::fs::create_dir_all(&config_dir).unwrap();
    // Create minimal config
    let config_path = config_dir.join("config.toml");
    std::fs::write(&config_path, "[settings]\n").unwrap();
    // "invalid_repo_no_slash" lacks the user/repo separator, so validation
    // must fail before any config mutation happens.
    empeve()
        .env("XDG_CONFIG_HOME", temp.path().join(".config"))
        .args(["add", "invalid_repo_no_slash"])
        .assert()
        .failure()
        .stderr(predicate::str::contains("invalid repository format"));
}
/// Test that the add command accepts valid GitHub shorthand
#[test]
fn test_cli_add_valid_repo_format() {
    let temp = TempDir::new().unwrap();
    let config_dir = temp.path().join(".config").join("empeve");
    std::fs::create_dir_all(&config_dir).unwrap();
    // Create minimal config
    let config_path = config_dir.join("config.toml");
    std::fs::write(&config_path, "[settings]\n").unwrap();
    empeve()
        .env("XDG_CONFIG_HOME", temp.path().join(".config"))
        .args(["add", "user/repo"])
        .assert()
        .success()
        .stdout(predicate::str::contains("Added"));
    // Verify config was updated
    let config_content = std::fs::read_to_string(&config_path).unwrap();
    assert!(config_content.contains("user/repo"));
}
/// Test that adding a duplicate repo fails
#[test]
fn test_cli_add_duplicate_fails() {
    let temp = TempDir::new().unwrap();
    let config_dir = temp.path().join(".config").join("empeve");
    std::fs::create_dir_all(&config_dir).unwrap();
    // Create config with existing repo
    let config_path = config_dir.join("config.toml");
    std::fs::write(
        &config_path,
        r#"[settings]
[[repos]]
repo = "existing/repo"
"#,
    )
    .unwrap();
    empeve()
        .env("XDG_CONFIG_HOME", temp.path().join(".config"))
        .args(["add", "existing/repo"])
        .assert()
        .failure()
        .stderr(predicate::str::contains("already exists"));
}
/// Test that removing a non-existent repo fails
#[test]
fn test_cli_remove_nonexistent_fails() {
    let temp = TempDir::new().unwrap();
    let config_dir = temp.path().join(".config").join("empeve");
    std::fs::create_dir_all(&config_dir).unwrap();
    // Create empty config
    let config_path = config_dir.join("config.toml");
    std::fs::write(&config_path, "[settings]\n").unwrap();
    empeve()
        .env("XDG_CONFIG_HOME", temp.path().join(".config"))
        .args(["remove", "nonexistent/repo"])
        .assert()
        .failure()
        .stderr(predicate::str::contains("not found"));
}
/// Test status command with empty config
#[test]
fn test_cli_status_empty_config() {
    let temp = TempDir::new().unwrap();
    let config_dir = temp.path().join(".config").join("empeve");
    std::fs::create_dir_all(&config_dir).unwrap();
    // Create empty config
    let config_path = config_dir.join("config.toml");
    std::fs::write(&config_path, "[settings]\n").unwrap();
    empeve()
        .env("XDG_CONFIG_HOME", temp.path().join(".config"))
        .arg("status")
        .assert()
        .success()
        .stdout(predicate::str::contains("No repositories"));
}
/// Test that --target flag is recognized
#[test]
fn test_cli_target_flag() {
    let temp = TempDir::new().unwrap();
    let config_dir = temp.path().join(".config").join("empeve");
    std::fs::create_dir_all(&config_dir).unwrap();
    // Create config with target
    let config_path = config_dir.join("config.toml");
    let mpv_path = temp.path().join("mpv");
    std::fs::create_dir_all(&mpv_path).unwrap();
    // NOTE(review): on Windows `mpv_path.display()` yields backslashes,
    // which TOML treats as escape sequences inside basic strings — this may
    // need a literal string or forward slashes; confirm if Windows CI runs
    // this test.
    std::fs::write(
        &config_path,
        format!(
            r#"[settings]
[[targets]]
name = "mpv"
path = "{}"
"#,
            mpv_path.display()
        ),
    )
    .unwrap();
    empeve()
        .env("XDG_CONFIG_HOME", temp.path().join(".config"))
        .args(["--target", "mpv", "list"])
        .assert()
        .success();
}