fix(parser): bug in indentation

This commit is contained in:
Ray Sinurat 2026-02-05 22:35:09 -06:00
parent c53b4db9cf
commit ca86eaae6e
45 changed files with 1555 additions and 603 deletions

35
Cargo.lock generated
View file

@ -1046,6 +1046,7 @@ dependencies = [
"glob", "glob",
"hostname", "hostname",
"indicatif", "indicatif",
"minijinja",
"os_info", "os_info",
"regex-lite", "regex-lite",
"serde", "serde",
@ -1055,6 +1056,7 @@ dependencies = [
"thiserror 2.0.18", "thiserror 2.0.18",
"toml 0.8.23", "toml 0.8.23",
"walkdir", "walkdir",
"which",
] ]
[[package]] [[package]]
@ -1106,6 +1108,12 @@ dependencies = [
"cfg-if", "cfg-if",
] ]
[[package]]
name = "env_home"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe"
[[package]] [[package]]
name = "equivalent" name = "equivalent"
version = "1.0.2" version = "1.0.2"
@ -2030,6 +2038,15 @@ dependencies = [
"unicase", "unicase",
] ]
[[package]]
name = "minijinja"
version = "2.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b479616bb6f0779fb0f3964246beda02d4b01144e1b0d5519616e012ccc2a245"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "minimal-lexical" name = "minimal-lexical"
version = "0.2.1" version = "0.2.1"
@ -3716,6 +3733,18 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "which"
version = "7.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762"
dependencies = [
"either",
"env_home",
"rustix 1.1.3",
"winsafe",
]
[[package]] [[package]]
name = "winapi" name = "winapi"
version = "0.3.9" version = "0.3.9"
@ -3918,6 +3947,12 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "winsafe"
version = "0.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
[[package]] [[package]]
name = "wit-bindgen" name = "wit-bindgen"
version = "0.51.0" version = "0.51.0"

View file

@ -3,7 +3,7 @@ use doot_core::state::{StateStore, SyncStatus};
use doot_core::{Config, Deployer}; use doot_core::{Config, Deployer};
use doot_lang::ast::HookStage; use doot_lang::ast::HookStage;
use doot_lang::evaluator::{DotfileConfig, HookConfig}; use doot_lang::evaluator::{DotfileConfig, HookConfig};
use doot_lang::{validate_dotfile_targets, DotfileConflict, Evaluator}; use doot_lang::{DotfileConflict, Evaluator, validate_dotfile_targets};
use indicatif::{ProgressBar, ProgressStyle}; use indicatif::{ProgressBar, ProgressStyle};
use std::io::{self, Write}; use std::io::{self, Write};
use std::path::PathBuf; use std::path::PathBuf;
@ -112,7 +112,11 @@ pub fn run(
for dotfile in &ordered_dotfiles { for dotfile in &ordered_dotfiles {
let full_source = source_dir.join(&dotfile.source); let full_source = source_dir.join(&dotfile.source);
let status = state.check_sync_status(&full_source, &dotfile.target); let status = state.check_sync_status_with_template(
&full_source,
&dotfile.target,
Some(dotfile.template),
);
// For directories, check individual files for smarter merging // For directories, check individual files for smarter merging
if full_source.is_dir() { if full_source.is_dir() {
@ -124,7 +128,9 @@ pub fn run(
for (src, tgt, file_status) in changed_files { for (src, tgt, file_status) in changed_files {
match file_status { match file_status {
SyncStatus::Synced => {} SyncStatus::Synced => {}
SyncStatus::NotDeployed | SyncStatus::TargetMissing | SyncStatus::SourceChanged => { SyncStatus::NotDeployed
| SyncStatus::TargetMissing
| SyncStatus::SourceChanged => {
// Can auto-merge: just copy from source // Can auto-merge: just copy from source
has_changes = true; has_changes = true;
if verbose { if verbose {
@ -185,10 +191,7 @@ pub fn run(
conflicts.push((dotfile, status)); conflicts.push((dotfile, status));
} }
SyncStatus::SourceMissing => { SyncStatus::SourceMissing => {
eprintln!( eprintln!(" [error] source missing: {}", dotfile.source.display());
" [error] source missing: {}",
dotfile.source.display()
);
} }
} }
} }
@ -255,7 +258,9 @@ pub fn run(
dotfile.source.display(), dotfile.source.display(),
dotfile.target.display() dotfile.target.display()
); );
println!(" [s] Use source [t] Keep target [d] Show diff [m] Merge in editor"); println!(
" [s] Use source [t] Keep target [d] Show diff [m] Merge in editor"
);
print!(" Choice [s/t/d/m]: "); print!(" Choice [s/t/d/m]: ");
io::stdout().flush()?; io::stdout().flush()?;
@ -307,7 +312,11 @@ pub fn run(
} else { } else {
println!("\n[dry-run] would deploy:"); println!("\n[dry-run] would deploy:");
for dotfile in &to_deploy { for dotfile in &to_deploy {
println!(" {} -> {}", dotfile.source.display(), dotfile.target.display()); println!(
" {} -> {}",
dotfile.source.display(),
dotfile.target.display()
);
} }
} }
@ -387,11 +396,7 @@ pub fn run(
} }
for skipped in &deploy_result.skipped { for skipped in &deploy_result.skipped {
println!( println!(" [skip] {} ({})", skipped.target.display(), skipped.reason);
" [skip] {} ({})",
skipped.target.display(),
skipped.reason
);
} }
for error in &deploy_result.errors { for error in &deploy_result.errors {
@ -433,7 +438,10 @@ pub fn run(
} }
if to_install.is_empty() { if to_install.is_empty() {
println!("\nall {} packages already installed", already_installed.len()); println!(
"\nall {} packages already installed",
already_installed.len()
);
} else { } else {
println!("\ninstalling {} packages...", to_install.len()); println!("\ninstalling {} packages...", to_install.len());
manager.install(&to_install)?; manager.install(&to_install)?;
@ -549,8 +557,8 @@ fn merge_in_editor(source: &PathBuf, target: &PathBuf) -> anyhow::Result<bool> {
let status = Command::new(&editor) let status = Command::new(&editor)
.arg("-d") .arg("-d")
.arg(&merged_path) // target (editable) .arg(&merged_path) // target (editable)
.arg(source) // source (reference) .arg(source) // source (reference)
.status()?; .status()?;
if !status.success() { if !status.success() {
@ -562,9 +570,7 @@ fn merge_in_editor(source: &PathBuf, target: &PathBuf) -> anyhow::Result<bool> {
println!(" Opening {} with target content...", editor); println!(" Opening {} with target content...", editor);
println!(" Reference source: {}", source.display()); println!(" Reference source: {}", source.display());
let status = Command::new(&editor) let status = Command::new(&editor).arg(&merged_path).status()?;
.arg(&merged_path)
.status()?;
if !status.success() { if !status.success() {
let _ = std::fs::remove_file(&merged_path); let _ = std::fs::remove_file(&merged_path);

View file

@ -1,4 +1,4 @@
use doot_core::{encryption::AgeEncryption, Config}; use doot_core::{Config, encryption::AgeEncryption};
use std::path::PathBuf; use std::path::PathBuf;
pub fn run(file: PathBuf, identity: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> { pub fn run(file: PathBuf, identity: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
@ -8,7 +8,9 @@ pub fn run(file: PathBuf, identity: Option<PathBuf>, verbose: bool) -> anyhow::R
} else if let Ok(key) = std::env::var("DOOT_AGE_IDENTITY") { } else if let Ok(key) = std::env::var("DOOT_AGE_IDENTITY") {
key key
} else if config.identity_file.exists() { } else if config.identity_file.exists() {
std::fs::read_to_string(&config.identity_file)?.trim().to_string() std::fs::read_to_string(&config.identity_file)?
.trim()
.to_string()
} else { } else {
anyhow::bail!( anyhow::bail!(
"no identity specified. use --identity, DOOT_AGE_IDENTITY env var, or {}", "no identity specified. use --identity, DOOT_AGE_IDENTITY env var, or {}",

View file

@ -1,12 +1,12 @@
use super::{find_config_file, parse_config, type_check}; use super::{find_config_file, parse_config, type_check};
use doot_core::{ use doot_core::{
deploy::Linker,
state::{DeployMode, StateStore},
Config, Config,
deploy::{Linker, TemplateEngine},
state::{DeployMode, StateStore},
}; };
use doot_lang::Evaluator; use doot_lang::Evaluator;
use std::io::{self, Write}; use std::io::{self, Write};
use std::path::PathBuf; use std::path::{Path, PathBuf};
use std::process::Command; use std::process::Command;
pub fn run( pub fn run(
@ -67,7 +67,7 @@ pub fn run(
if should_apply { if should_apply {
if let Some(df) = dotfile { if let Some(df) = dotfile {
apply_single(&source_file, &df.target, &df, &config, verbose)?; apply_single(&source_file, &df.target, df, &config, verbose)?;
println!("applied changes to {}", df.target.display()); println!("applied changes to {}", df.target.display());
} else { } else {
println!("hint: run 'doot apply' to deploy changes"); println!("hint: run 'doot apply' to deploy changes");
@ -109,6 +109,28 @@ fn apply_single(
let mut state = StateStore::new(&config.state_file); let mut state = StateStore::new(&config.state_file);
// Handle templates specially
if dotfile.template {
if let Some(parent) = target.parent() {
std::fs::create_dir_all(parent)?;
}
let content = std::fs::read_to_string(source)?;
let engine = TemplateEngine::new();
let rendered = engine
.render(&content)
.map_err(|e| anyhow::anyhow!("template error: {}", e))?;
std::fs::write(target, rendered)?;
if verbose {
println!("rendered {} -> {}", source.display(), target.display());
}
state.record_deployment_with_template(source, target, DeployMode::Copy, true);
state.save()?;
return Ok(());
}
match deploy_mode { match deploy_mode {
DeployMode::Link => { DeployMode::Link => {
let linker = Linker::new(config.clone()); let linker = Linker::new(config.clone());
@ -147,7 +169,7 @@ fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> std::io::Result<()> {
let dst_path = dst.join(entry.file_name()); let dst_path = dst.join(entry.file_name());
if ty.is_dir() { if ty.is_dir() {
copy_dir_recursive(&src_path.into(), &dst_path)?; copy_dir_recursive(&src_path, &dst_path)?;
} else { } else {
std::fs::copy(&src_path, &dst_path)?; std::fs::copy(&src_path, &dst_path)?;
} }
@ -156,18 +178,17 @@ fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> std::io::Result<()> {
} }
fn expand_tilde(path: &str) -> PathBuf { fn expand_tilde(path: &str) -> PathBuf {
if path.starts_with("~/") { if path.starts_with("~/")
if let Some(home) = dirs::home_dir() { && let Some(home) = dirs::home_dir() {
return home.join(&path[2..]); return home.join(&path[2..]);
} }
}
PathBuf::from(path) PathBuf::from(path)
} }
fn find_source_and_dotfile<'a>( fn find_source_and_dotfile<'a>(
target: &PathBuf, target: &PathBuf,
dotfiles: &'a [doot_lang::evaluator::DotfileConfig], dotfiles: &'a [doot_lang::evaluator::DotfileConfig],
source_dir: &PathBuf, source_dir: &Path,
state: &StateStore, state: &StateStore,
) -> anyhow::Result<(PathBuf, Option<&'a doot_lang::evaluator::DotfileConfig>)> { ) -> anyhow::Result<(PathBuf, Option<&'a doot_lang::evaluator::DotfileConfig>)> {
// Exact match with dotfile targets // Exact match with dotfile targets

View file

@ -1,4 +1,4 @@
use doot_core::{encryption::AgeEncryption, Config}; use doot_core::{Config, encryption::AgeEncryption};
use std::path::PathBuf; use std::path::PathBuf;
pub fn run(file: PathBuf, recipient: Option<String>, verbose: bool) -> anyhow::Result<()> { pub fn run(file: PathBuf, recipient: Option<String>, verbose: bool) -> anyhow::Result<()> {
@ -20,7 +20,11 @@ pub fn run(file: PathBuf, recipient: Option<String>, verbose: bool) -> anyhow::R
}; };
if verbose { if verbose {
println!("encrypting {} with recipient {}", file.display(), &recipient_key[..20]); println!(
"encrypting {} with recipient {}",
file.display(),
&recipient_key[..20]
);
} }
let mut encryption = AgeEncryption::new(); let mut encryption = AgeEncryption::new();

View file

@ -14,13 +14,11 @@ pub fn run(config_path: Option<PathBuf>, check: bool, _verbose: bool) -> anyhow:
} else { } else {
println!("{} is formatted correctly", path.display()); println!("{} is formatted correctly", path.display());
} }
} else if formatted != source {
std::fs::write(&path, &formatted)?;
println!("formatted {}", path.display());
} else { } else {
if formatted != source { println!("{} is already formatted", path.display());
std::fs::write(&path, &formatted)?;
println!("formatted {}", path.display());
} else {
println!("{} is already formatted", path.display());
}
} }
Ok(()) Ok(())

View file

@ -1,5 +1,5 @@
use doot_core::Config; use doot_core::Config;
use std::path::PathBuf; use std::path::{Path, PathBuf};
pub fn run(path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> { pub fn run(path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
let source_dir = path.unwrap_or_else(Config::default_source_dir); let source_dir = path.unwrap_or_else(Config::default_source_dir);
@ -49,7 +49,10 @@ pub fn run(path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
println!(" state: {}", config.state_dir.display()); println!(" state: {}", config.state_dir.display());
println!(); println!();
println!("next steps:"); println!("next steps:");
println!(" 1. add dotfiles to {}/config/", config.config_dir.display()); println!(
" 1. add dotfiles to {}/config/",
config.config_dir.display()
);
println!(" 2. edit {}/doot.doot", config.config_dir.display()); println!(" 2. edit {}/doot.doot", config.config_dir.display());
println!(" 3. run 'doot apply -n' to preview"); println!(" 3. run 'doot apply -n' to preview");
println!(" 4. run 'doot apply' to deploy"); println!(" 4. run 'doot apply' to deploy");
@ -57,7 +60,7 @@ pub fn run(path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
Ok(()) Ok(())
} }
fn example_config_with_source(source_dir: &PathBuf) -> String { fn example_config_with_source(source_dir: &Path) -> String {
format!( format!(
r#"# doot.doot r#"# doot.doot
# source directory: {source_dir} # source directory: {source_dir}

View file

@ -25,10 +25,7 @@ pub fn find_config_file(base: Option<PathBuf>) -> anyhow::Result<PathBuf> {
anyhow::bail!("config file not found: {}", path.display()); anyhow::bail!("config file not found: {}", path.display());
} }
let candidates = vec![ let candidates = vec![PathBuf::from("doot.doot"), Config::default_config_file()];
PathBuf::from("doot.doot"),
Config::default_config_file(),
];
for candidate in candidates { for candidate in candidates {
if candidate.exists() { if candidate.exists() {

View file

@ -27,14 +27,12 @@ pub fn install(config_path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()
let package_names: Vec<String> = result let package_names: Vec<String> = result
.packages .packages
.iter() .iter()
.filter_map(|p| { .filter_map(|p| match manager.name() {
match manager.name() { "brew" => p.brew.clone().or_else(|| p.default.clone()),
"brew" => p.brew.clone().or_else(|| p.default.clone()), "apt" => p.apt.clone().or_else(|| p.default.clone()),
"apt" => p.apt.clone().or_else(|| p.default.clone()), "pacman" => p.pacman.clone().or_else(|| p.default.clone()),
"pacman" => p.pacman.clone().or_else(|| p.default.clone()), "yay" => p.yay.clone().or_else(|| p.default.clone()),
"yay" => p.yay.clone().or_else(|| p.default.clone()), _ => p.default.clone(),
_ => p.default.clone(),
}
}) })
.collect(); .collect();

View file

@ -1,6 +1,6 @@
use doot_core::{ use doot_core::{
state::{DeployMode, Snapshot},
Config, Config,
state::{DeployMode, Snapshot},
}; };
use std::path::PathBuf; use std::path::PathBuf;

View file

@ -1,6 +1,6 @@
use doot_core::{ use doot_core::{
state::{Snapshot, StateStore},
Config, Config,
state::{Snapshot, StateStore},
}; };
use std::path::PathBuf; use std::path::PathBuf;
@ -16,7 +16,8 @@ pub fn run(_config_path: Option<PathBuf>, name: String, verbose: bool) -> anyhow
println!(" snapshot dir: {}", config.snapshot_dir.display()); println!(" snapshot dir: {}", config.snapshot_dir.display());
} }
let state_content = std::fs::read_to_string(&config.state_file).unwrap_or_else(|_| "{}".to_string()); let state_content =
std::fs::read_to_string(&config.state_file).unwrap_or_else(|_| "{}".to_string());
let state_data: doot_core::state::store::State = serde_json::from_str(&state_content)?; let state_data: doot_core::state::store::State = serde_json::from_str(&state_content)?;
Snapshot::create(&name, &state_data, &config.snapshot_dir)?; Snapshot::create(&name, &state_data, &config.snapshot_dir)?;

View file

@ -2,19 +2,19 @@ use super::{find_config_file, parse_config, type_check};
use crossterm::{ use crossterm::{
event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyEventKind}, event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyEventKind},
execute, execute,
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen}, terminal::{EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode},
}; };
use doot_core::config::Config; use doot_core::config::Config;
use doot_core::deploy::Linker; use doot_core::deploy::Linker;
use doot_core::state::{DeployMode, StateStore}; use doot_core::state::{DeployMode, StateStore};
use doot_lang::Evaluator; use doot_lang::Evaluator;
use ratatui::{ use ratatui::{
Frame, Terminal,
backend::CrosstermBackend, backend::CrosstermBackend,
layout::{Constraint, Direction, Layout}, layout::{Constraint, Direction, Layout},
style::{Color, Modifier, Style}, style::{Color, Modifier, Style},
text::{Line, Span}, text::{Line, Span},
widgets::{Block, Borders, List, ListItem, ListState, Paragraph, Tabs, Gauge}, widgets::{Block, Borders, Gauge, List, ListItem, ListState, Paragraph, Tabs},
Frame, Terminal,
}; };
use std::io; use std::io;
use std::path::PathBuf; use std::path::PathBuf;
@ -234,14 +234,22 @@ impl App {
Tab::Dotfiles => { Tab::Dotfiles => {
let len = self.dotfiles.len(); let len = self.dotfiles.len();
if len > 0 { if len > 0 {
let i = self.dotfile_state.selected().map(|i| (i + 1) % len).unwrap_or(0); let i = self
.dotfile_state
.selected()
.map(|i| (i + 1) % len)
.unwrap_or(0);
self.dotfile_state.select(Some(i)); self.dotfile_state.select(Some(i));
} }
} }
Tab::Packages => { Tab::Packages => {
let len = self.packages.len(); let len = self.packages.len();
if len > 0 { if len > 0 {
let i = self.package_state.selected().map(|i| (i + 1) % len).unwrap_or(0); let i = self
.package_state
.selected()
.map(|i| (i + 1) % len)
.unwrap_or(0);
self.package_state.select(Some(i)); self.package_state.select(Some(i));
} }
} }
@ -257,14 +265,22 @@ impl App {
Tab::Dotfiles => { Tab::Dotfiles => {
let len = self.dotfiles.len(); let len = self.dotfiles.len();
if len > 0 { if len > 0 {
let i = self.dotfile_state.selected().map(|i| if i == 0 { len - 1 } else { i - 1 }).unwrap_or(0); let i = self
.dotfile_state
.selected()
.map(|i| if i == 0 { len - 1 } else { i - 1 })
.unwrap_or(0);
self.dotfile_state.select(Some(i)); self.dotfile_state.select(Some(i));
} }
} }
Tab::Packages => { Tab::Packages => {
let len = self.packages.len(); let len = self.packages.len();
if len > 0 { if len > 0 {
let i = self.package_state.selected().map(|i| if i == 0 { len - 1 } else { i - 1 }).unwrap_or(0); let i = self
.package_state
.selected()
.map(|i| if i == 0 { len - 1 } else { i - 1 })
.unwrap_or(0);
self.package_state.select(Some(i)); self.package_state.select(Some(i));
} }
} }
@ -278,20 +294,17 @@ impl App {
} }
match self.tab { match self.tab {
Tab::Dotfiles => { Tab::Dotfiles => {
if let Some(i) = self.dotfile_state.selected() { if let Some(i) = self.dotfile_state.selected()
if let Some(item) = self.dotfiles.get_mut(i) { && let Some(item) = self.dotfiles.get_mut(i)
if item.status != FileStatus::Error { && item.status != FileStatus::Error {
item.selected = !item.selected; item.selected = !item.selected;
} }
}
}
} }
Tab::Packages => { Tab::Packages => {
if let Some(i) = self.package_state.selected() { if let Some(i) = self.package_state.selected()
if let Some(item) = self.packages.get_mut(i) { && let Some(item) = self.packages.get_mut(i) {
item.selected = !item.selected; item.selected = !item.selected;
} }
}
} }
_ => {} _ => {}
} }
@ -347,20 +360,25 @@ impl App {
self.apply_logs.clear(); self.apply_logs.clear();
self.log_scroll = 0; self.log_scroll = 0;
let selected_dotfiles: Vec<_> = self.dotfiles.iter() let selected_dotfiles: Vec<_> = self
.dotfiles
.iter()
.enumerate() .enumerate()
.filter(|(_, d)| d.selected && d.status != FileStatus::Error) .filter(|(_, d)| d.selected && d.status != FileStatus::Error)
.map(|(i, _)| i) .map(|(i, _)| i)
.collect(); .collect();
let selected_packages: Vec<_> = self.packages.iter() let selected_packages: Vec<_> = self
.packages
.iter()
.enumerate() .enumerate()
.filter(|(_, p)| p.selected && !p.installed) .filter(|(_, p)| p.selected && !p.installed)
.map(|(i, _)| i) .map(|(i, _)| i)
.collect(); .collect();
if selected_dotfiles.is_empty() && selected_packages.is_empty() { if selected_dotfiles.is_empty() && selected_packages.is_empty() {
self.apply_logs.push(("Nothing to apply".to_string(), LogLevel::Info)); self.apply_logs
.push(("Nothing to apply".to_string(), LogLevel::Info));
self.apply_state = ApplyState::Done; self.apply_state = ApplyState::Done;
return; return;
} }
@ -386,23 +404,26 @@ impl App {
let has_packages = self.packages.iter().any(|p| p.selected && !p.installed); let has_packages = self.packages.iter().any(|p| p.selected && !p.installed);
let has_owner = self.dotfiles.iter().any(|d| d.selected); let has_owner = self.dotfiles.iter().any(|d| d.selected);
if has_packages { if has_packages
if let Some(manager) = doot_core::package::detect_package_manager() { && let Some(manager) = doot_core::package::detect_package_manager() {
return manager.needs_sudo(); return manager.needs_sudo();
} }
}
has_owner has_owner
} }
fn apply_with_sudo(&mut self) { fn apply_with_sudo(&mut self) {
let selected_dotfiles: Vec<_> = self.dotfiles.iter() let selected_dotfiles: Vec<_> = self
.dotfiles
.iter()
.enumerate() .enumerate()
.filter(|(_, d)| d.selected && d.status != FileStatus::Error) .filter(|(_, d)| d.selected && d.status != FileStatus::Error)
.map(|(i, _)| i) .map(|(i, _)| i)
.collect(); .collect();
let selected_packages: Vec<_> = self.packages.iter() let selected_packages: Vec<_> = self
.packages
.iter()
.enumerate() .enumerate()
.filter(|(_, p)| p.selected && !p.installed) .filter(|(_, p)| p.selected && !p.installed)
.map(|(i, _)| i) .map(|(i, _)| i)
@ -427,12 +448,20 @@ impl App {
}; };
self.apply_logs.push(( self.apply_logs.push((
format!("{} {} -> {}", action_name, dotfile.source.display(), target.display()), format!(
"{} {} -> {}",
action_name,
dotfile.source.display(),
target.display()
),
LogLevel::Info, LogLevel::Info,
)); ));
let result: Result<(), String> = match dotfile.deploy_mode { let result: Result<(), String> = match dotfile.deploy_mode {
DeployMode::Link => linker.link(&full_source, target).map(|_| ()).map_err(|e| e.to_string()), DeployMode::Link => linker
.link(&full_source, target)
.map(|_| ())
.map_err(|e| e.to_string()),
DeployMode::Copy => copy_file(&full_source, target), DeployMode::Copy => copy_file(&full_source, target),
}; };
@ -448,10 +477,8 @@ impl App {
self.dotfiles[idx].selected = false; self.dotfiles[idx].selected = false;
} }
Err(e) => { Err(e) => {
self.apply_logs.push(( self.apply_logs
format!(" ✗ Failed: {}", e), .push((format!(" ✗ Failed: {}", e), LogLevel::Error));
LogLevel::Error,
));
self.dotfiles[idx].status = FileStatus::Error; self.dotfiles[idx].status = FileStatus::Error;
} }
} }
@ -471,37 +498,31 @@ impl App {
let result = if manager.needs_sudo() { let result = if manager.needs_sudo() {
if let Some(ref password) = self.sudo_password { if let Some(ref password) = self.sudo_password {
manager.install_with_sudo(&[package.name.clone()], password) manager.install_with_sudo(std::slice::from_ref(&package.name), password)
} else { } else {
manager.install(&[package.name.clone()]) manager.install(std::slice::from_ref(&package.name))
} }
} else { } else {
manager.install(&[package.name.clone()]) manager.install(std::slice::from_ref(&package.name))
}; };
match result { match result {
Ok(_) => { Ok(_) => {
self.apply_logs.push(( self.apply_logs
format!(" ✓ Installed {}", package.name), .push((format!(" ✓ Installed {}", package.name), LogLevel::Success));
LogLevel::Success,
));
self.packages[idx].installed = true; self.packages[idx].installed = true;
self.packages[idx].selected = false; self.packages[idx].selected = false;
} }
Err(e) => { Err(e) => {
self.apply_logs.push(( self.apply_logs
format!(" ✗ Failed: {}", e), .push((format!(" ✗ Failed: {}", e), LogLevel::Error));
LogLevel::Error,
));
} }
} }
self.apply_progress += 1; self.apply_progress += 1;
} }
} else { } else {
self.apply_logs.push(( self.apply_logs
"No package manager available".to_string(), .push(("No package manager available".to_string(), LogLevel::Error));
LogLevel::Error,
));
} }
self.apply_state = ApplyState::Done; self.apply_state = ApplyState::Done;
@ -529,8 +550,8 @@ fn run_app(
loop { loop {
terminal.draw(|f| ui(f, &mut app))?; terminal.draw(|f| ui(f, &mut app))?;
if let Event::Key(key) = event::read()? { if let Event::Key(key) = event::read()?
if key.kind == KeyEventKind::Press { && key.kind == KeyEventKind::Press {
match app.input_mode { match app.input_mode {
InputMode::Password => match key.code { InputMode::Password => match key.code {
KeyCode::Enter => { KeyCode::Enter => {
@ -583,7 +604,9 @@ fn run_app(
_ => {} _ => {}
}, },
ApplyState::Done => match key.code { ApplyState::Done => match key.code {
KeyCode::Enter | KeyCode::Esc | KeyCode::Char('q') => app.dismiss_apply(), KeyCode::Enter | KeyCode::Esc | KeyCode::Char('q') => {
app.dismiss_apply()
}
KeyCode::Up | KeyCode::Char('k') => app.scroll_log_up(), KeyCode::Up | KeyCode::Char('k') => app.scroll_log_up(),
KeyCode::Down | KeyCode::Char('j') => app.scroll_log_down(), KeyCode::Down | KeyCode::Char('j') => app.scroll_log_down(),
_ => {} _ => {}
@ -591,7 +614,6 @@ fn run_app(
}, },
} }
} }
}
} }
} }
@ -615,7 +637,11 @@ fn ui(f: &mut Frame, app: &mut App) {
Tab::Status => 3, Tab::Status => 3,
}) })
.style(Style::default().fg(Color::Cyan)) .style(Style::default().fg(Color::Cyan))
.highlight_style(Style::default().fg(Color::Yellow).add_modifier(Modifier::BOLD)); .highlight_style(
Style::default()
.fg(Color::Yellow)
.add_modifier(Modifier::BOLD),
);
f.render_widget(tabs, chunks[0]); f.render_widget(tabs, chunks[0]);
match app.input_mode { match app.input_mode {
@ -652,8 +678,7 @@ fn ui(f: &mut Frame, app: &mut App) {
} else { } else {
"Applying..." "Applying..."
}; };
let help = Paragraph::new(help_text) let help = Paragraph::new(help_text).block(Block::default().borders(Borders::ALL));
.block(Block::default().borders(Borders::ALL));
f.render_widget(help, chunks[2]); f.render_widget(help, chunks[2]);
} }
}, },
@ -663,10 +688,7 @@ fn ui(f: &mut Frame, app: &mut App) {
fn render_apply_progress(f: &mut Frame, app: &App, area: ratatui::layout::Rect) { fn render_apply_progress(f: &mut Frame, app: &App, area: ratatui::layout::Rect) {
let chunks = Layout::default() let chunks = Layout::default()
.direction(Direction::Vertical) .direction(Direction::Vertical)
.constraints([ .constraints([Constraint::Length(3), Constraint::Min(0)])
Constraint::Length(3),
Constraint::Min(0),
])
.split(area); .split(area);
// Progress bar // Progress bar
@ -697,7 +719,10 @@ fn render_apply_progress(f: &mut Frame, app: &App, area: ratatui::layout::Rect)
LogLevel::Success => Color::Green, LogLevel::Success => Color::Green,
LogLevel::Error => Color::Red, LogLevel::Error => Color::Red,
}; };
ListItem::new(Line::from(Span::styled(msg.as_str(), Style::default().fg(color)))) ListItem::new(Line::from(Span::styled(
msg.as_str(),
Style::default().fg(color),
)))
}) })
.collect(); .collect();
@ -707,8 +732,7 @@ fn render_apply_progress(f: &mut Frame, app: &App, area: ratatui::layout::Rect)
"Applying..." "Applying..."
}; };
let list = List::new(items) let list = List::new(items).block(Block::default().borders(Borders::ALL).title(title));
.block(Block::default().borders(Borders::ALL).title(title));
f.render_widget(list, chunks[1]); f.render_widget(list, chunks[1]);
} }
@ -782,35 +806,63 @@ fn render_secrets(f: &mut Frame, area: ratatui::layout::Rect) {
} }
fn render_status(f: &mut Frame, app: &App, area: ratatui::layout::Rect) { fn render_status(f: &mut Frame, app: &App, area: ratatui::layout::Rect) {
let synced = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Synced)).count(); let synced = app
let pending = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Pending)).count(); .dotfiles
let modified = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Modified)).count(); .iter()
let errors = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Error)).count(); .filter(|d| matches!(d.status, FileStatus::Synced))
.count();
let pending = app
.dotfiles
.iter()
.filter(|d| matches!(d.status, FileStatus::Pending))
.count();
let modified = app
.dotfiles
.iter()
.filter(|d| matches!(d.status, FileStatus::Modified))
.count();
let errors = app
.dotfiles
.iter()
.filter(|d| matches!(d.status, FileStatus::Error))
.count();
let installed = app.packages.iter().filter(|p| p.installed).count(); let installed = app.packages.iter().filter(|p| p.installed).count();
let text = format!( let text = format!(
"Source: {}\n\nDotfiles:\n Synced: {}\n Pending: {}\n Modified: {}\n Errors: {}\n\nPackages:\n Installed: {}/{}", "Source: {}\n\nDotfiles:\n Synced: {}\n Pending: {}\n Modified: {}\n Errors: {}\n\nPackages:\n Installed: {}/{}",
app.source_dir.display(), app.source_dir.display(),
synced, pending, modified, errors, installed, app.packages.len() synced,
pending,
modified,
errors,
installed,
app.packages.len()
); );
let paragraph = Paragraph::new(text) let paragraph =
.block(Block::default().borders(Borders::ALL).title("Status")); Paragraph::new(text).block(Block::default().borders(Borders::ALL).title("Status"));
f.render_widget(paragraph, area); f.render_widget(paragraph, area);
} }
fn render_sudo_prompt(f: &mut Frame, area: ratatui::layout::Rect) { fn render_sudo_prompt(f: &mut Frame, area: ratatui::layout::Rect) {
let text = "Package installation requires sudo privileges.\n\nDo you want to enter your password?"; let text =
let paragraph = Paragraph::new(text) "Package installation requires sudo privileges.\n\nDo you want to enter your password?";
.block(Block::default().borders(Borders::ALL).title("Sudo Required")); let paragraph = Paragraph::new(text).block(
Block::default()
.borders(Borders::ALL)
.title("Sudo Required"),
);
f.render_widget(paragraph, area); f.render_widget(paragraph, area);
} }
fn render_password_input(f: &mut Frame, app: &App, area: ratatui::layout::Rect) { fn render_password_input(f: &mut Frame, app: &App, area: ratatui::layout::Rect) {
let masked: String = "*".repeat(app.password_input.len()); let masked: String = "*".repeat(app.password_input.len());
let text = format!("Password: {}_", masked); let text = format!("Password: {}_", masked);
let paragraph = Paragraph::new(text) let paragraph = Paragraph::new(text).block(
.block(Block::default().borders(Borders::ALL).title("Enter sudo password")); Block::default()
.borders(Borders::ALL)
.title("Enter sudo password"),
);
f.render_widget(paragraph, area); f.render_widget(paragraph, area);
} }
@ -837,7 +889,7 @@ fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> std::io::Result<()> {
let dst_path = dst.join(entry.file_name()); let dst_path = dst.join(entry.file_name());
if ty.is_dir() { if ty.is_dir() {
copy_dir_recursive(&src_path.into(), &dst_path)?; copy_dir_recursive(&src_path, &dst_path)?;
} else { } else {
std::fs::copy(&src_path, &dst_path)?; std::fs::copy(&src_path, &dst_path)?;
} }

View file

@ -118,9 +118,7 @@ fn main() -> anyhow::Result<()> {
Commands::Encrypt { file, recipient } => { Commands::Encrypt { file, recipient } => {
commands::encrypt::run(file, recipient, cli.verbose) commands::encrypt::run(file, recipient, cli.verbose)
} }
Commands::Decrypt { file, identity } => { Commands::Decrypt { file, identity } => commands::decrypt::run(file, identity, cli.verbose),
commands::decrypt::run(file, identity, cli.verbose)
}
Commands::Package { action } => match action { Commands::Package { action } => match action {
PackageAction::Install => commands::package::install(cli.config, cli.verbose), PackageAction::Install => commands::package::install(cli.config, cli.verbose),
PackageAction::Update => commands::package::update(cli.verbose), PackageAction::Update => commands::package::update(cli.verbose),

View file

@ -72,18 +72,29 @@ fn test_init_creates_structure() {
assert!(output.status.success(), "init failed: {:?}", output); assert!(output.status.success(), "init failed: {:?}", output);
assert!(sandbox.config_file().exists(), "config file not created"); assert!(sandbox.config_file().exists(), "config file not created");
assert!(sandbox.config_dir().join("config").exists(), "config dir not created"); assert!(
assert!(sandbox.state_dir().join("backups").exists(), "backups dir not created"); sandbox.config_dir().join("config").exists(),
assert!(sandbox.state_dir().join("snapshots").exists(), "snapshots dir not created"); "config dir not created"
);
assert!(
sandbox.state_dir().join("backups").exists(),
"backups dir not created"
);
assert!(
sandbox.state_dir().join("snapshots").exists(),
"snapshots dir not created"
);
} }
#[test] #[test]
fn test_check_valid_config() { fn test_check_valid_config() {
let sandbox = Sandbox::new("check-valid"); let sandbox = Sandbox::new("check-valid");
sandbox.write_config(r#" sandbox.write_config(
r#"
package: "ripgrep" package: "ripgrep"
package: "fd" package: "fd"
"#); "#,
);
let output = sandbox.run(&["check"]); let output = sandbox.run(&["check"]);
assert!(output.status.success(), "check failed: {:?}", output); assert!(output.status.success(), "check failed: {:?}", output);
@ -92,11 +103,13 @@ package: "fd"
#[test] #[test]
fn test_apply_dry_run() { fn test_apply_dry_run() {
let sandbox = Sandbox::new("apply-dry"); let sandbox = Sandbox::new("apply-dry");
sandbox.write_config(r#" sandbox.write_config(
r#"
dotfile: dotfile:
source = "config/test.conf" source = "config/test.conf"
target = "~/.config/test/test.conf" target = "~/.config/test/test.conf"
"#); "#,
);
sandbox.write_source("config/test.conf", "test content"); sandbox.write_source("config/test.conf", "test content");
let output = sandbox.run(&["apply", "-n"]); let output = sandbox.run(&["apply", "-n"]);
@ -109,12 +122,14 @@ dotfile:
#[test] #[test]
fn test_apply_creates_symlink() { fn test_apply_creates_symlink() {
let sandbox = Sandbox::new("apply-symlink"); let sandbox = Sandbox::new("apply-symlink");
sandbox.write_config(r#" sandbox.write_config(
r#"
dotfile: dotfile:
source = "config/app.conf" source = "config/app.conf"
target = "~/.config/app/app.conf" target = "~/.config/app/app.conf"
deploy = "link" deploy = "link"
"#); "#,
);
sandbox.write_source("config/app.conf", "app config content"); sandbox.write_source("config/app.conf", "app config content");
let output = sandbox.run(&["apply"]); let output = sandbox.run(&["apply"]);
@ -147,17 +162,22 @@ fn test_apply_unchanged_on_rerun() {
// Second apply should succeed (symlink already exists and points correctly) // Second apply should succeed (symlink already exists and points correctly)
let target = sandbox.path.join(".config/app/app.conf"); let target = sandbox.path.join(".config/app/app.conf");
assert!(target.is_symlink(), "target should still be symlink after second apply"); assert!(
target.is_symlink(),
"target should still be symlink after second apply"
);
} }
#[test] #[test]
fn test_apply_creates_copy() { fn test_apply_creates_copy() {
let sandbox = Sandbox::new("apply-copy"); let sandbox = Sandbox::new("apply-copy");
sandbox.write_config(r#" sandbox.write_config(
r#"
dotfile: dotfile:
source = "config/app.conf" source = "config/app.conf"
target = "~/.config/app/app.conf" target = "~/.config/app/app.conf"
"#); "#,
);
sandbox.write_source("config/app.conf", "app config content"); sandbox.write_source("config/app.conf", "app config content");
let output = sandbox.run(&["apply"]); let output = sandbox.run(&["apply"]);
@ -165,7 +185,10 @@ dotfile:
let target = sandbox.path.join(".config/app/app.conf"); let target = sandbox.path.join(".config/app/app.conf");
assert!(target.exists(), "target should exist"); assert!(target.exists(), "target should exist");
assert!(!target.is_symlink(), "target should be a copy, not a symlink"); assert!(
!target.is_symlink(),
"target should be a copy, not a symlink"
);
let content = std::fs::read_to_string(&target).unwrap(); let content = std::fs::read_to_string(&target).unwrap();
assert_eq!(content, "app config content", "content should match source"); assert_eq!(content, "app config content", "content should match source");
@ -193,11 +216,13 @@ fn test_apply_copy_unchanged_on_rerun() {
#[test] #[test]
fn test_status_shows_state() { fn test_status_shows_state() {
let sandbox = Sandbox::new("status"); let sandbox = Sandbox::new("status");
sandbox.write_config(r#" sandbox.write_config(
r#"
dotfile: dotfile:
source = "config/app.conf" source = "config/app.conf"
target = "~/.config/app/app.conf" target = "~/.config/app/app.conf"
"#); "#,
);
sandbox.write_source("config/app.conf", "content"); sandbox.write_source("config/app.conf", "content");
sandbox.run(&["apply"]); sandbox.run(&["apply"]);
@ -208,16 +233,22 @@ dotfile:
#[test] #[test]
fn test_snapshot_and_rollback() { fn test_snapshot_and_rollback() {
let sandbox = Sandbox::new("snapshot"); let sandbox = Sandbox::new("snapshot");
sandbox.write_config(r#" sandbox.write_config(
r#"
dotfile: dotfile:
source = "config/app.conf" source = "config/app.conf"
target = "~/.config/app/app.conf" target = "~/.config/app/app.conf"
"#); "#,
);
sandbox.write_source("config/app.conf", "v1"); sandbox.write_source("config/app.conf", "v1");
sandbox.run(&["apply"]); sandbox.run(&["apply"]);
let snap_output = sandbox.run(&["snapshot", "v1"]); let snap_output = sandbox.run(&["snapshot", "v1"]);
assert!(snap_output.status.success(), "snapshot failed: {:?}", snap_output); assert!(
snap_output.status.success(),
"snapshot failed: {:?}",
snap_output
);
let snapshot_file = sandbox.state_dir().join("snapshots/v1.json"); let snapshot_file = sandbox.state_dir().join("snapshots/v1.json");
assert!(snapshot_file.exists(), "snapshot file not created"); assert!(snapshot_file.exists(), "snapshot file not created");
@ -240,7 +271,10 @@ fn test_dotfile_with_when_condition() {
assert!(output.status.success(), "apply failed: {:?}", output); assert!(output.status.success(), "apply failed: {:?}", output);
let target = sandbox.path.join(".config/test.conf"); let target = sandbox.path.join(".config/test.conf");
assert!(target.exists(), "file should be deployed when condition is true"); assert!(
target.exists(),
"file should be deployed when condition is true"
);
} }
#[test] #[test]
@ -259,17 +293,22 @@ fn test_dotfile_when_false_skips() {
assert!(output.status.success(), "apply failed: {:?}", output); assert!(output.status.success(), "apply failed: {:?}", output);
let target = sandbox.path.join(".config/skip.conf"); let target = sandbox.path.join(".config/skip.conf");
assert!(!target.exists(), "file should NOT be deployed when condition is false"); assert!(
!target.exists(),
"file should NOT be deployed when condition is false"
);
} }
#[test] #[test]
fn test_diff_shows_changes() { fn test_diff_shows_changes() {
let sandbox = Sandbox::new("diff"); let sandbox = Sandbox::new("diff");
sandbox.write_config(r#" sandbox.write_config(
r#"
dotfile: dotfile:
source = "config/app.conf" source = "config/app.conf"
target = "~/.config/app/app.conf" target = "~/.config/app/app.conf"
"#); "#,
);
sandbox.write_source("config/app.conf", "new content"); sandbox.write_source("config/app.conf", "new content");
let target_dir = sandbox.path.join(".config/app"); let target_dir = sandbox.path.join(".config/app");

View file

@ -22,3 +22,5 @@ anyhow.workspace = true
hostname = "0.4" hostname = "0.4"
regex-lite = "0.1" regex-lite = "0.1"
glob = "0.3" glob = "0.3"
minijinja = { version = "2", features = ["builtins"] }
which = "7"

View file

@ -72,7 +72,7 @@ impl Config {
return PathBuf::from(doot_home).join(".local/state/doot"); return PathBuf::from(doot_home).join(".local/state/doot");
} }
dirs::state_dir() dirs::state_dir()
.or_else(|| dirs::data_local_dir()) .or_else(dirs::data_local_dir)
.unwrap_or_else(|| Self::home_dir().join(".local/state")) .unwrap_or_else(|| Self::home_dir().join(".local/state"))
.join("doot") .join("doot")
} }

View file

@ -5,8 +5,8 @@ pub mod linker;
pub mod template; pub mod template;
use crate::config::Config; use crate::config::Config;
use crate::state::store::DeployMode;
use crate::state::StateStore; use crate::state::StateStore;
use crate::state::store::DeployMode;
use doot_lang::evaluator::DotfileConfig; use doot_lang::evaluator::DotfileConfig;
use glob::Pattern; use glob::Pattern;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -181,12 +181,11 @@ impl Deployer {
} }
// For files or link mode, handle as before // For files or link mode, handle as before
if target.exists() && !target.is_symlink() { if target.exists() && !target.is_symlink()
if !self.config.dry_run { && !self.config.dry_run {
self.backup_existing(target)?; self.backup_existing(target)?;
std::fs::remove_file(target)?; std::fs::remove_file(target)?;
} }
}
let action = if dotfile.template { let action = if dotfile.template {
self.deploy_template(&source, target)? self.deploy_template(&source, target)?
@ -198,18 +197,21 @@ impl Deployer {
}; };
// Set permissions if specified (only for copy mode, symlinks inherit from source) // Set permissions if specified (only for copy mode, symlinks inherit from source)
if !dotfile.permissions.is_empty() && deploy_mode == DeployMode::Copy && !self.config.dry_run { if !dotfile.permissions.is_empty()
&& deploy_mode == DeployMode::Copy
&& !self.config.dry_run
{
apply_permissions(target, &dotfile.permissions)?; apply_permissions(target, &dotfile.permissions)?;
} }
// Set owner if specified // Set owner if specified
if let Some(ref owner) = dotfile.owner { if let Some(ref owner) = dotfile.owner
if !self.config.dry_run { && !self.config.dry_run {
set_owner(target, owner)?; set_owner(target, owner)?;
} }
}
self.state.record_deployment(&source, target, deploy_mode); self.state
.record_deployment_with_template(&source, target, deploy_mode, dotfile.template);
Ok(DeployedFile { Ok(DeployedFile {
source: source.clone(), source: source.clone(),
@ -242,9 +244,7 @@ impl Deployer {
for (src_file, tgt_file, status) in changed_files { for (src_file, tgt_file, status) in changed_files {
match status { match status {
SyncStatus::NotDeployed SyncStatus::NotDeployed | SyncStatus::TargetMissing | SyncStatus::SourceChanged => {
| SyncStatus::TargetMissing
| SyncStatus::SourceChanged => {
// Copy from source to target // Copy from source to target
if !self.config.dry_run { if !self.config.dry_run {
if let Some(parent) = tgt_file.parent() { if let Some(parent) = tgt_file.parent() {
@ -264,12 +264,14 @@ impl Deployer {
any_updated = true; any_updated = true;
} }
self.state.record_deployment(&src_file, &tgt_file, deploy_mode); self.state
.record_deployment(&src_file, &tgt_file, deploy_mode);
} }
SyncStatus::TargetChanged => { SyncStatus::TargetChanged => {
// Target changed but source didn't - keep target, just update state // Target changed but source didn't - keep target, just update state
// This is like keeping local changes in git // This is like keeping local changes in git
self.state.record_deployment(&src_file, &tgt_file, deploy_mode); self.state
.record_deployment(&src_file, &tgt_file, deploy_mode);
} }
SyncStatus::Conflict => { SyncStatus::Conflict => {
// Real conflict - user already chose "use source" at directory level // Real conflict - user already chose "use source" at directory level
@ -284,7 +286,8 @@ impl Deployer {
} }
} }
any_updated = true; any_updated = true;
self.state.record_deployment(&src_file, &tgt_file, deploy_mode); self.state
.record_deployment(&src_file, &tgt_file, deploy_mode);
} }
SyncStatus::SourceMissing => { SyncStatus::SourceMissing => {
// File was deleted from source, remove from target // File was deleted from source, remove from target
@ -301,11 +304,10 @@ impl Deployer {
} }
// Set owner if specified (for entire directory) // Set owner if specified (for entire directory)
if let Some(ref owner) = dotfile.owner { if let Some(ref owner) = dotfile.owner
if !self.config.dry_run { && !self.config.dry_run {
set_owner(target, owner)?; set_owner(target, owner)?;
} }
}
// Also record the directory-level deployment for sync status checks // Also record the directory-level deployment for sync status checks
self.state.record_deployment(source, target, deploy_mode); self.state.record_deployment(source, target, deploy_mode);
@ -340,21 +342,19 @@ impl Deployer {
match base_mode { match base_mode {
DeployMode::Copy => { DeployMode::Copy => {
for pattern in &dotfile.link_patterns { for pattern in &dotfile.link_patterns {
if let Ok(p) = Pattern::new(pattern) { if let Ok(p) = Pattern::new(pattern)
if p.matches(&relative_path) { && p.matches(&relative_path) {
return DeployMode::Link; return DeployMode::Link;
} }
}
} }
DeployMode::Copy DeployMode::Copy
} }
DeployMode::Link => { DeployMode::Link => {
for pattern in &dotfile.copy_patterns { for pattern in &dotfile.copy_patterns {
if let Ok(p) = Pattern::new(pattern) { if let Ok(p) = Pattern::new(pattern)
if p.matches(&relative_path) { && p.matches(&relative_path) {
return DeployMode::Copy; return DeployMode::Copy;
} }
}
} }
DeployMode::Link DeployMode::Link
} }
@ -523,12 +523,11 @@ fn apply_permissions_recursive(
break; break;
} }
PermissionRule::Pattern { pattern, mode } => { PermissionRule::Pattern { pattern, mode } => {
if let Ok(p) = Pattern::new(pattern) { if let Ok(p) = Pattern::new(pattern)
if p.matches(&relative) { && p.matches(&relative) {
set_file_permissions(&path, *mode)?; set_file_permissions(&path, *mode)?;
break; break;
} }
}
} }
} }
} }
@ -577,7 +576,10 @@ fn set_owner(path: &Path, owner: &str) -> Result<(), DeployError> {
if !sudo_output.status.success() { if !sudo_output.status.success() {
let stderr = String::from_utf8_lossy(&sudo_output.stderr); let stderr = String::from_utf8_lossy(&sudo_output.stderr);
return Err(DeployError::ChownFailed(path.to_path_buf(), stderr.to_string())); return Err(DeployError::ChownFailed(
path.to_path_buf(),
stderr.to_string(),
));
} }
Ok(()) Ok(())

View file

@ -1,68 +1,47 @@
//! Template rendering for dotfiles. //! Template rendering for dotfiles using MiniJinja.
use minijinja::{Environment, Value};
use std::collections::HashMap; use std::collections::HashMap;
use std::path::PathBuf;
/// Renders templates with variable substitution. /// Renders templates with Jinja2-style syntax.
pub struct TemplateEngine { pub struct TemplateEngine {
variables: HashMap<String, String>, env: Environment<'static>,
variables: HashMap<String, Value>,
} }
impl TemplateEngine { impl TemplateEngine {
/// Creates a new engine with default variables. /// Creates a new engine with default variables and functions.
pub fn new() -> Self { pub fn new() -> Self {
let mut variables = HashMap::new(); let mut env = Environment::new();
if let Some(home) = dirs::home_dir() { // Register custom functions
variables.insert("home".to_string(), home.display().to_string()); register_functions(&mut env);
}
if let Some(config) = dirs::config_dir() {
variables.insert("config_dir".to_string(), config.display().to_string());
}
if let Some(data) = dirs::data_dir() {
variables.insert("data_dir".to_string(), data.display().to_string());
}
if let Some(cache) = dirs::cache_dir() {
variables.insert("cache_dir".to_string(), cache.display().to_string());
}
variables.insert("os".to_string(), std::env::consts::OS.to_string()); // Build default variables
variables.insert("arch".to_string(), std::env::consts::ARCH.to_string()); let variables = build_default_variables();
if let Ok(hostname) = hostname::get() { Self { env, variables }
variables.insert("hostname".to_string(), hostname.to_string_lossy().to_string());
}
for (key, value) in std::env::vars() {
variables.insert(format!("env.{}", key), value);
}
Self { variables }
} }
/// Sets a template variable. /// Sets a template variable.
pub fn set_variable(&mut self, key: String, value: String) { pub fn set_variable(&mut self, key: String, value: impl Into<Value>) {
self.variables.insert(key, value); self.variables.insert(key, value.into());
} }
/// Renders a template string. /// Renders a template string.
pub fn render(&self, template: &str) -> Result<String, String> { pub fn render(&self, template: &str) -> Result<String, String> {
let mut result = template.to_string(); // Add template to environment
let tmpl = self
.env
.template_from_str(template)
.map_err(|e| format!("template parse error: {}", e))?;
for (key, value) in &self.variables { // Build context from variables
result = result.replace(&format!("{{{{ {} }}}}", key), value); let ctx = Value::from_iter(self.variables.clone());
result = result.replace(&format!("{{{{{}}}}}", key), value);
}
let re = regex_lite::Regex::new(r"\{\{[^}]+\}\}").unwrap(); tmpl.render(ctx)
if re.is_match(&result) { .map_err(|e| format!("template render error: {}", e))
let unresolved: Vec<&str> = re.find_iter(&result).map(|m| m.as_str()).collect();
return Err(format!(
"unresolved template variables: {}",
unresolved.join(", ")
));
}
Ok(result)
} }
} }
@ -71,3 +50,532 @@ impl Default for TemplateEngine {
Self::new() Self::new()
} }
} }
/// Builds the default template variables.
///
/// Provides directory paths (`home`, `config_dir`, `data_dir`, `cache_dir`),
/// system info (`os`, `arch`, `hostname`, and `distro` on Linux), and the
/// full process environment under a nested `env` object.
fn build_default_variables() -> HashMap<String, Value> {
    let mut vars = HashMap::new();

    // Well-known directory paths, inserted only when resolvable on this host.
    let dir_entries = [
        ("home", dirs::home_dir()),
        ("config_dir", dirs::config_dir()),
        ("data_dir", dirs::data_dir()),
        ("cache_dir", dirs::cache_dir()),
    ];
    for (key, maybe_path) in dir_entries {
        if let Some(path) = maybe_path {
            vars.insert(key.to_string(), Value::from(path.display().to_string()));
        }
    }

    // Basic system information.
    vars.insert("os".to_string(), Value::from(std::env::consts::OS));
    vars.insert("arch".to_string(), Value::from(std::env::consts::ARCH));
    if let Ok(name) = hostname::get() {
        vars.insert(
            "hostname".to_string(),
            Value::from(name.to_string_lossy().to_string()),
        );
    }

    // Linux distribution id (from /etc/os-release), when detectable.
    if std::env::consts::OS == "linux" {
        if let Some(distro) = detect_distro() {
            vars.insert("distro".to_string(), Value::from(distro));
        }
    }

    // Environment variables, exposed as one nested `env` object.
    let env_map: HashMap<String, Value> = std::env::vars()
        .map(|(key, val)| (key, Value::from(val)))
        .collect();
    vars.insert("env".to_string(), Value::from_iter(env_map));

    vars
}
/// Registers custom functions available in templates.
///
/// Groups: file-system probes, path helpers, command/process helpers,
/// environment access, OS/distro detection, parsing (JSON/TOML), BLAKE3
/// hashing, and string predicates. Every function is best-effort: on
/// failure it yields an "empty" value (`false`, `""`, `[]`, `-1`, or
/// undefined) rather than aborting the render.
fn register_functions(env: &mut Environment<'static>) {
    // ===== File System Functions =====
    // All path arguments go through expand_path(), so a leading `~` is
    // expanded to the home directory before the check.

    // file_exists(path) - check if a file or directory exists
    env.add_function("file_exists", |path: String| -> bool {
        std::path::Path::new(&expand_path(&path)).exists()
    });

    // dir_exists(path) - check if a directory exists
    env.add_function("dir_exists", |path: String| -> bool {
        std::path::Path::new(&expand_path(&path)).is_dir()
    });

    // is_symlink(path) - check if path is a symlink
    env.add_function("is_symlink", |path: String| -> bool {
        std::path::Path::new(&expand_path(&path)).is_symlink()
    });

    // read_link(path) - get symlink target (empty string on error)
    env.add_function("read_link", |path: String| -> String {
        std::fs::read_link(expand_path(&path))
            .map(|p| p.display().to_string())
            .unwrap_or_default()
    });

    // read_file(path) - read file contents (returns empty string on error)
    env.add_function("read_file", |path: String| -> String {
        std::fs::read_to_string(expand_path(&path)).unwrap_or_default()
    });

    // read_file_lines(path) - read file as list of lines (empty list on error)
    env.add_function("read_file_lines", |path: String| -> Vec<String> {
        std::fs::read_to_string(expand_path(&path))
            .map(|s| s.lines().map(|l| l.to_string()).collect())
            .unwrap_or_default()
    });

    // list_dir(path) - list directory contents as full paths; entries that
    // fail to read are silently skipped
    env.add_function("list_dir", |path: String| -> Vec<String> {
        std::fs::read_dir(expand_path(&path))
            .map(|entries| {
                entries
                    .filter_map(|e| e.ok())
                    .map(|e| e.path().display().to_string())
                    .collect()
            })
            .unwrap_or_default()
    });

    // glob(pattern) - find files matching glob pattern
    // NOTE(review): the pattern is NOT tilde-expanded, unlike the path
    // functions above — confirm whether that asymmetry is intended.
    env.add_function("glob", |pattern: String| -> Vec<String> {
        glob::glob(&pattern)
            .map(|paths| {
                paths
                    .filter_map(|p| p.ok())
                    .map(|p| p.display().to_string())
                    .collect()
            })
            .unwrap_or_default()
    });

    // temp_dir() - get temp directory path
    env.add_function("temp_dir", || -> String {
        std::env::temp_dir().display().to_string()
    });

    // ===== Path Functions =====

    // path_join(parts) - join a list of path components into one path.
    // NOTE(review): the closure takes a single Vec<String>, so templates
    // must pass one list argument (e.g. path_join(["a", "b"])), not
    // separate positional args — confirm against template call sites.
    env.add_function("path_join", |parts: Vec<String>| -> String {
        let mut result = PathBuf::new();
        for part in parts {
            result.push(part);
        }
        result.display().to_string()
    });

    // path_parent(path) - get parent directory (empty string at the root)
    env.add_function("path_parent", |path: String| -> String {
        std::path::Path::new(&path)
            .parent()
            .map(|p| p.display().to_string())
            .unwrap_or_default()
    });

    // path_filename(path) - get filename component
    env.add_function("path_filename", |path: String| -> String {
        std::path::Path::new(&path)
            .file_name()
            .map(|s| s.to_string_lossy().to_string())
            .unwrap_or_default()
    });

    // path_extension(path) - get file extension (without the dot)
    env.add_function("path_extension", |path: String| -> String {
        std::path::Path::new(&path)
            .extension()
            .map(|s| s.to_string_lossy().to_string())
            .unwrap_or_default()
    });

    // config_path(app) - get config directory for an app
    env.add_function("config_path", |app: String| -> String {
        dirs::config_dir()
            .map(|p| p.join(&app).display().to_string())
            .unwrap_or_default()
    });

    // ===== Command/Process Functions =====

    // command_exists(cmd) - check if a command is available in PATH
    env.add_function("command_exists", |cmd: String| -> bool {
        which::which(&cmd).is_ok()
    });

    // which(cmd) - get full path to command (empty string if not found)
    env.add_function("which", |cmd: String| -> String {
        which::which(&cmd)
            .map(|p| p.display().to_string())
            .unwrap_or_default()
    });

    // shell(cmd) - execute a shell command and return trimmed stdout.
    // stderr and the exit status are ignored; a failed spawn yields "".
    env.add_function("shell", |cmd: String| -> String {
        std::process::Command::new("sh")
            .arg("-c")
            .arg(&cmd)
            .output()
            .map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
            .unwrap_or_default()
    });

    // shell_status(cmd) - execute command and return exit status
    // (-1 when the process was killed by a signal or could not be spawned)
    env.add_function("shell_status", |cmd: String| -> i32 {
        std::process::Command::new("sh")
            .arg("-c")
            .arg(&cmd)
            .status()
            .map(|s| s.code().unwrap_or(-1))
            .unwrap_or(-1)
    });

    // ===== Environment Functions =====

    // get_env(name, default?) - get environment variable with optional default
    env.add_function(
        "get_env",
        |name: String, default: Option<String>| -> String {
            std::env::var(&name).unwrap_or_else(|_| default.unwrap_or_default())
        },
    );

    // ===== OS Detection Functions =====

    // is_macos() - convenience check
    env.add_function("is_macos", || -> bool { std::env::consts::OS == "macos" });

    // is_linux() - convenience check
    env.add_function("is_linux", || -> bool { std::env::consts::OS == "linux" });

    // is_windows() - convenience check
    env.add_function("is_windows", || -> bool {
        std::env::consts::OS == "windows"
    });

    // The is_<distro>() helpers compare against the ID field of
    // /etc/os-release (see detect_distro), so they re-read that file on
    // every call.

    // is_arch() - check if running Arch Linux
    env.add_function("is_arch", || -> bool {
        detect_distro().map(|d| d == "arch").unwrap_or(false)
    });

    // is_ubuntu() - check if running Ubuntu
    env.add_function("is_ubuntu", || -> bool {
        detect_distro().map(|d| d == "ubuntu").unwrap_or(false)
    });

    // is_fedora() - check if running Fedora
    env.add_function("is_fedora", || -> bool {
        detect_distro().map(|d| d == "fedora").unwrap_or(false)
    });

    // is_debian() - check if running Debian
    env.add_function("is_debian", || -> bool {
        detect_distro().map(|d| d == "debian").unwrap_or(false)
    });

    // is_nixos() - check if running NixOS
    env.add_function("is_nixos", || -> bool {
        detect_distro().map(|d| d == "nixos").unwrap_or(false)
    });

    // is_nix() - check if nix package manager is available
    env.add_function("is_nix", || -> bool {
        // Check for nix command or /nix directory
        which::which("nix").is_ok() || std::path::Path::new("/nix").exists()
    });

    // hostname_matches(pattern) - check if hostname matches a pattern.
    // '*' acts as a wildcard; NOTE(review): other regex metacharacters in
    // the pattern (e.g. '.') are not escaped before building the regex,
    // so '.' matches any character — confirm this looseness is acceptable.
    env.add_function("hostname_matches", |pattern: String| -> bool {
        if let Ok(hostname) = hostname::get() {
            let hostname = hostname.to_string_lossy();
            if pattern.contains('*') {
                // Simple glob matching
                let regex_pattern = format!("^{}$", pattern.replace('*', ".*"));
                regex_lite::Regex::new(&regex_pattern)
                    .map(|re| re.is_match(&hostname))
                    .unwrap_or(false)
            } else {
                hostname == pattern
            }
        } else {
            false
        }
    });

    // ===== Parsing Functions =====

    // from_json(str) - parse JSON string into object (undefined on error)
    env.add_function("from_json", |s: String| -> Value {
        serde_json::from_str::<serde_json::Value>(&s)
            .map(|v| json_to_minijinja(&v))
            .unwrap_or(Value::UNDEFINED)
    });

    // from_toml(str) - parse TOML string into object (undefined on error)
    env.add_function("from_toml", |s: String| -> Value {
        toml::from_str::<toml::Value>(&s)
            .map(|v| toml_to_minijinja(&v))
            .unwrap_or(Value::UNDEFINED)
    });

    // ===== Crypto Functions =====

    // hash_str(str) - hash a string using BLAKE3 (64-char hex digest)
    env.add_function("hash_str", |s: String| -> String {
        blake3::hash(s.as_bytes()).to_hex().to_string()
    });

    // hash_file(path) - hash a file using BLAKE3 (empty string on read error)
    env.add_function("hash_file", |path: String| -> String {
        std::fs::read(expand_path(&path))
            .map(|content| blake3::hash(&content).to_hex().to_string())
            .unwrap_or_default()
    });

    // ===== String Functions =====

    // starts_with(str, prefix) - check if string starts with prefix
    env.add_function("starts_with", |s: String, prefix: String| -> bool {
        s.starts_with(&prefix)
    });

    // ends_with(str, suffix) - check if string ends with suffix
    env.add_function("ends_with", |s: String, suffix: String| -> bool {
        s.ends_with(&suffix)
    });

    // contains(str, needle) - check if string contains needle
    env.add_function("contains", |s: String, needle: String| -> bool {
        s.contains(&needle)
    });
}
/// Expands ~ to home directory in paths.
fn expand_path(s: &str) -> PathBuf {
if let Some(stripped) = s.strip_prefix('~') {
let home = dirs::home_dir().unwrap_or_default();
home.join(stripped.strip_prefix('/').unwrap_or(stripped))
} else {
PathBuf::from(s)
}
}
/// Detects the Linux distribution.
///
/// Reads the `ID=` field from `/etc/os-release` (quotes stripped).
/// Returns `None` on non-Linux systems, when the file is unreadable,
/// or when it contains no `ID=` line.
fn detect_distro() -> Option<String> {
    if std::env::consts::OS != "linux" {
        return None;
    }

    // Try /etc/os-release first; any read error maps to None via `ok()?`.
    let content = std::fs::read_to_string("/etc/os-release").ok()?;
    content
        .lines()
        .find_map(|line| line.strip_prefix("ID="))
        .map(|id| id.trim_matches('"').to_string())
}
/// Converts a `serde_json::Value` into an equivalent `minijinja::Value`.
///
/// Arrays and objects are converted recursively; object keys are collected
/// through a `BTreeMap`, so they come out in sorted order.
fn json_to_minijinja(json: &serde_json::Value) -> Value {
    match json {
        serde_json::Value::Null => Value::from(()),
        serde_json::Value::Bool(flag) => Value::from(*flag),
        // Prefer an integer representation, fall back to float; a number
        // representable as neither (not produced by default serde_json)
        // degrades to unit.
        serde_json::Value::Number(num) => num
            .as_i64()
            .map(Value::from)
            .or_else(|| num.as_f64().map(Value::from))
            .unwrap_or_else(|| Value::from(())),
        serde_json::Value::String(text) => Value::from(text.clone()),
        serde_json::Value::Array(items) => {
            let converted: Vec<Value> = items.iter().map(json_to_minijinja).collect();
            Value::from(converted)
        }
        serde_json::Value::Object(fields) => {
            let converted: std::collections::BTreeMap<String, Value> = fields
                .iter()
                .map(|(key, val)| (key.clone(), json_to_minijinja(val)))
                .collect();
            Value::from_iter(converted)
        }
    }
}
/// Converts a `toml::Value` into an equivalent `minijinja::Value`.
///
/// Arrays and tables are converted recursively; table keys are collected
/// through a `BTreeMap`, so they come out in sorted order. Datetimes have
/// no native minijinja type and are exposed as their string form.
fn toml_to_minijinja(toml: &toml::Value) -> Value {
    match toml {
        toml::Value::Boolean(flag) => Value::from(*flag),
        toml::Value::Integer(num) => Value::from(*num),
        toml::Value::Float(num) => Value::from(*num),
        toml::Value::String(text) => Value::from(text.clone()),
        toml::Value::Array(items) => {
            let converted: Vec<Value> = items.iter().map(toml_to_minijinja).collect();
            Value::from(converted)
        }
        toml::Value::Table(table) => {
            let converted: std::collections::BTreeMap<String, Value> = table
                .iter()
                .map(|(key, val)| (key.clone(), toml_to_minijinja(val)))
                .collect();
            Value::from_iter(converted)
        }
        toml::Value::Datetime(stamp) => Value::from(stamp.to_string()),
    }
}
// Engine-level tests for the MiniJinja-backed TemplateEngine. Several of
// these are host-dependent (they branch on std::env::consts::OS, read PATH,
// or assume a Unix `sh`), so they exercise real system state rather than
// fixtures.
#[cfg(test)]
mod tests {
    use super::*;

    // `os` is one of the default variables; the render must substitute it.
    #[test]
    fn test_simple_variable() {
        let engine = TemplateEngine::new();
        let result = engine.render("Hello {{ os }}!").unwrap();
        assert!(result.contains("linux") || result.contains("macos") || result.contains("windows"));
    }

    // Jinja-style {% if %}/{% else %} branches on the real host OS.
    #[test]
    fn test_if_else() {
        let engine = TemplateEngine::new();
        let template = r#"{% if os == "linux" %}Linux{% else %}Other{% endif %}"#;
        let result = engine.render(template).unwrap();
        if std::env::consts::OS == "linux" {
            assert_eq!(result, "Linux");
        } else {
            assert_eq!(result, "Other");
        }
    }

    // command_exists() consults PATH via the `which` crate.
    #[test]
    fn test_command_exists() {
        let engine = TemplateEngine::new();
        // 'sh' should exist on all Unix systems
        let template = r#"{% if command_exists("sh") %}yes{% else %}no{% endif %}"#;
        let result = engine.render(template).unwrap();
        assert_eq!(result, "yes");
    }

    // which() returns the resolved path, which must mention the binary name.
    #[test]
    fn test_which() {
        let engine = TemplateEngine::new();
        let result = engine.render("{{ which('sh') }}").unwrap();
        assert!(result.contains("sh"));
    }

    // Environment variables are exposed under the nested `env` object; the
    // variable must be set BEFORE the engine is constructed, since defaults
    // are snapshotted in TemplateEngine::new().
    #[test]
    fn test_env_access() {
        // SAFETY: This is a test running in isolation
        unsafe { std::env::set_var("DOOT_TEST_VAR", "test_value") };
        let engine = TemplateEngine::new();
        let result = engine.render("{{ env.DOOT_TEST_VAR }}").unwrap();
        assert_eq!(result, "test_value");
    }

    // Built-in MiniJinja filters (here `upper`) work out of the box.
    #[test]
    fn test_filters() {
        let engine = TemplateEngine::new();
        let result = engine.render("{{ 'hello' | upper }}").unwrap();
        assert_eq!(result, "HELLO");
    }

    // set_variable() makes a caller-provided list iterable in {% for %}.
    #[test]
    fn test_for_loop() {
        let mut engine = TemplateEngine::new();
        engine.set_variable("items".to_string(), Value::from(vec!["a", "b", "c"]));
        let result = engine
            .render("{% for i in items %}{{ i }}{% endfor %}")
            .unwrap();
        assert_eq!(result, "abc");
    }

    // is_linux() custom function agrees with the compile-time OS constant.
    #[test]
    fn test_is_linux_function() {
        let engine = TemplateEngine::new();
        let result = engine
            .render("{% if is_linux() %}yes{% else %}no{% endif %}")
            .unwrap();
        if std::env::consts::OS == "linux" {
            assert_eq!(result, "yes");
        } else {
            assert_eq!(result, "no");
        }
    }

    // Undefined variables can be recovered with the `default` filter.
    #[test]
    fn test_default_filter() {
        let engine = TemplateEngine::new();
        let result = engine
            .render("{{ undefined_var | default('fallback') }}")
            .unwrap();
        assert_eq!(result, "fallback");
    }

    // path_filename / path_extension / path_parent on a fixed Unix path.
    #[test]
    fn test_path_functions() {
        let engine = TemplateEngine::new();
        let result = engine
            .render("{{ path_filename('/foo/bar.txt') }}")
            .unwrap();
        assert_eq!(result, "bar.txt");
        let result = engine
            .render("{{ path_extension('/foo/bar.txt') }}")
            .unwrap();
        assert_eq!(result, "txt");
        let result = engine.render("{{ path_parent('/foo/bar.txt') }}").unwrap();
        assert_eq!(result, "/foo");
    }

    // hash_str() yields a hex BLAKE3 digest; only the length is pinned here.
    #[test]
    fn test_hash_str() {
        let engine = TemplateEngine::new();
        let result = engine.render("{{ hash_str('hello') }}").unwrap();
        // BLAKE3 hash of "hello"
        assert_eq!(result.len(), 64); // 32 bytes = 64 hex chars
    }

    // from_json() parses into an object whose fields are addressable.
    #[test]
    fn test_from_json() {
        let engine = TemplateEngine::new();
        let template = r#"{% set data = from_json('{"name": "test", "count": 42}') %}{{ data.name }}-{{ data.count }}"#;
        let result = engine.render(template).unwrap();
        assert_eq!(result, "test-42");
    }

    // starts_with()/ends_with() string predicates used inside {% if %}.
    #[test]
    fn test_starts_ends_with() {
        let engine = TemplateEngine::new();
        let result = engine
            .render("{% if starts_with('hello', 'he') %}yes{% endif %}")
            .unwrap();
        assert_eq!(result, "yes");
        let result = engine
            .render("{% if ends_with('hello', 'lo') %}yes{% endif %}")
            .unwrap();
        assert_eq!(result, "yes");
    }
}

View file

@ -91,10 +91,9 @@ impl AgeEncryption {
/// Decrypts data using the configured identity. /// Decrypts data using the configured identity.
pub fn decrypt(&self, data: &[u8]) -> Result<Vec<u8>, EncryptionError> { pub fn decrypt(&self, data: &[u8]) -> Result<Vec<u8>, EncryptionError> {
let identity = self let identity = self.identity.as_ref().ok_or_else(|| {
.identity EncryptionError::DecryptionFailed("no identity configured".to_string())
.as_ref() })?;
.ok_or_else(|| EncryptionError::DecryptionFailed("no identity configured".to_string()))?;
let decryptor = match age::Decryptor::new(data) let decryptor = match age::Decryptor::new(data)
.map_err(|e| EncryptionError::DecryptionFailed(e.to_string()))? .map_err(|e| EncryptionError::DecryptionFailed(e.to_string()))?
@ -103,7 +102,7 @@ impl AgeEncryption {
_ => { _ => {
return Err(EncryptionError::DecryptionFailed( return Err(EncryptionError::DecryptionFailed(
"unexpected decryptor type".to_string(), "unexpected decryptor type".to_string(),
)) ));
} }
}; };

View file

@ -12,7 +12,7 @@ pub mod package;
pub mod state; pub mod state;
pub use config::Config; pub use config::Config;
pub use deploy::{Deployer, DeployResult}; pub use deploy::{DeployResult, Deployer};
pub use encryption::AgeEncryption; pub use encryption::AgeEncryption;
pub use hooks::HookRunner; pub use hooks::HookRunner;
pub use os::OsInfo; pub use os::OsInfo;

View file

@ -135,7 +135,9 @@ fn command_exists(cmd: &str) -> bool {
} else { } else {
// Fallback to hardcoded paths // Fallback to hardcoded paths
let paths = ["/usr/bin/", "/usr/local/bin/", "/bin/"]; let paths = ["/usr/bin/", "/usr/local/bin/", "/bin/"];
paths.iter().any(|p| std::path::Path::new(&format!("{}{}", p, cmd)).exists()) paths
.iter()
.any(|p| std::path::Path::new(&format!("{}{}", p, cmd)).exists())
}; };
cache.insert(cmd.to_string(), exists); cache.insert(cmd.to_string(), exists);

View file

@ -33,10 +33,7 @@ impl Apt {
} }
let output = if self.use_sudo { let output = if self.use_sudo {
Command::new("sudo") Command::new("sudo").arg("apt").args(args).output()?
.arg("apt")
.args(args)
.output()?
} else { } else {
Command::new("apt").args(args).output()? Command::new("apt").args(args).output()?
}; };
@ -136,9 +133,7 @@ impl PackageManager for Apt {
} }
fn is_installed(&self, package: &str) -> Result<bool, PackageError> { fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
let output = Command::new("dpkg") let output = Command::new("dpkg").args(["-s", package]).output()?;
.args(["-s", package])
.output()?;
Ok(output.status.success()) Ok(output.status.success())
} }

View file

@ -82,9 +82,7 @@ impl PackageManager for Brew {
} }
fn is_installed(&self, package: &str) -> Result<bool, PackageError> { fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
let output = Command::new("brew") let output = Command::new("brew").args(["list", package]).output()?;
.args(["list", package])
.output()?;
Ok(output.status.success()) Ok(output.status.success())
} }

View file

@ -62,7 +62,9 @@ pub trait PackageManager: Send + Sync {
/// Returns true if running in test mode (DOOT_TEST_MODE=1) /// Returns true if running in test mode (DOOT_TEST_MODE=1)
pub fn is_test_mode() -> bool { pub fn is_test_mode() -> bool {
std::env::var("DOOT_TEST_MODE").map(|v| v == "1").unwrap_or(false) std::env::var("DOOT_TEST_MODE")
.map(|v| v == "1")
.unwrap_or(false)
} }
/// Mock package manager for testing - doesn't actually install anything /// Mock package manager for testing - doesn't actually install anything

View file

@ -33,10 +33,7 @@ impl Pacman {
} }
let output = if self.use_sudo { let output = if self.use_sudo {
Command::new("sudo") Command::new("sudo").arg("pacman").args(args).output()?
.arg("pacman")
.args(args)
.output()?
} else { } else {
Command::new("pacman").args(args).output()? Command::new("pacman").args(args).output()?
}; };
@ -136,9 +133,7 @@ impl PackageManager for Pacman {
} }
fn is_installed(&self, package: &str) -> Result<bool, PackageError> { fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
let output = Command::new("pacman") let output = Command::new("pacman").args(["-Q", package]).output()?;
.args(["-Q", package])
.output()?;
Ok(output.status.success()) Ok(output.status.success())
} }

View file

@ -84,9 +84,7 @@ impl PackageManager for Yay {
} }
fn is_installed(&self, package: &str) -> Result<bool, PackageError> { fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
let output = Command::new("yay") let output = Command::new("yay").args(["-Q", package]).output()?;
.args(["-Q", package])
.output()?;
Ok(output.status.success()) Ok(output.status.success())
} }

View file

@ -56,11 +56,10 @@ impl Snapshot {
for entry in std::fs::read_dir(snapshot_dir)? { for entry in std::fs::read_dir(snapshot_dir)? {
let entry = entry?; let entry = entry?;
let path = entry.path(); let path = entry.path();
if path.extension().map(|e| e == "json").unwrap_or(false) { if path.extension().map(|e| e == "json").unwrap_or(false)
if let Some(name) = path.file_stem() { && let Some(name) = path.file_stem() {
snapshots.push(name.to_string_lossy().to_string()); snapshots.push(name.to_string_lossy().to_string());
} }
}
} }
snapshots.sort(); snapshots.sort();

View file

@ -41,6 +41,9 @@ pub struct DeploymentRecord {
pub target_hash: String, pub target_hash: String,
pub deployed_at: String, pub deployed_at: String,
pub mode: DeployMode, pub mode: DeployMode,
/// Whether this file was deployed as a template (source != target content).
#[serde(default)]
pub template: bool,
} }
/// Sync status after comparing current hashes with recorded state. /// Sync status after comparing current hashes with recorded state.
@ -91,6 +94,17 @@ impl StateStore {
/// Records a deployment with both source and target hashes. /// Records a deployment with both source and target hashes.
pub fn record_deployment(&mut self, source: &Path, target: &Path, mode: DeployMode) { pub fn record_deployment(&mut self, source: &Path, target: &Path, mode: DeployMode) {
self.record_deployment_with_template(source, target, mode, false);
}
/// Records a deployment with template flag.
pub fn record_deployment_with_template(
&mut self,
source: &Path,
target: &Path,
mode: DeployMode,
template: bool,
) {
let source_hash = hash_path(source); let source_hash = hash_path(source);
let target_hash = hash_path(target); let target_hash = hash_path(target);
@ -101,6 +115,7 @@ impl StateStore {
target_hash, target_hash,
deployed_at: chrono_now(), deployed_at: chrono_now(),
mode, mode,
template,
}; };
self.state self.state
@ -111,10 +126,43 @@ impl StateStore {
/// Checks sync status by comparing current hashes with recorded state. /// Checks sync status by comparing current hashes with recorded state.
pub fn check_sync_status(&self, source: &Path, target: &Path) -> SyncStatus { pub fn check_sync_status(&self, source: &Path, target: &Path) -> SyncStatus {
self.check_sync_status_with_config(source, target, None, None)
}
/// Checks sync status, also detecting if template flag changed in config.
pub fn check_sync_status_with_template(
&self,
source: &Path,
target: &Path,
current_template: Option<bool>,
) -> SyncStatus {
self.check_sync_status_with_config(source, target, current_template, None)
}
/// Checks sync status, also detecting if config flags changed.
pub fn check_sync_status_with_config(
&self,
source: &Path,
target: &Path,
current_template: Option<bool>,
current_mode: Option<DeployMode>,
) -> SyncStatus {
let Some(record) = self.get_deployment(target) else { let Some(record) = self.get_deployment(target) else {
return SyncStatus::NotDeployed; return SyncStatus::NotDeployed;
}; };
// If template flag changed in config, force re-deploy
if let Some(is_template) = current_template
&& is_template != record.template {
return SyncStatus::SourceChanged;
}
// If deploy mode changed in config, force re-deploy
if let Some(mode) = current_mode
&& mode != record.mode {
return SyncStatus::SourceChanged;
}
if !source.exists() { if !source.exists() {
return SyncStatus::SourceMissing; return SyncStatus::SourceMissing;
} }
@ -124,6 +172,11 @@ impl StateStore {
// If stored hashes are empty (legacy record), treat as needing re-sync // If stored hashes are empty (legacy record), treat as needing re-sync
if record.source_hash.is_empty() || record.target_hash.is_empty() { if record.source_hash.is_empty() || record.target_hash.is_empty() {
// For templates, we can't compare source to target directly
if record.template {
return SyncStatus::SourceChanged;
}
let current_source_hash = hash_path(source); let current_source_hash = hash_path(source);
let current_target_hash = hash_path(target); let current_target_hash = hash_path(target);

View file

@ -36,7 +36,7 @@ pub enum Statement {
TypeAlias(TypeAlias), TypeAlias(TypeAlias),
Import(Import), Import(Import),
Dotfile(Dotfile), Dotfile(Dotfile),
Package(Package), Package(Box<Package>),
Secret(Secret), Secret(Secret),
Hook(Hook), Hook(Hook),
MacroDecl(MacroDecl), MacroDecl(MacroDecl),

View file

@ -11,7 +11,11 @@ pub fn race(args: &[Value]) -> Result<Value, EvalError> {
pub fn fetch(args: &[Value]) -> Result<Value, EvalError> { pub fn fetch(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() { let url = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("fetch expects a URL string".to_string())), _ => {
return Err(EvalError::TypeError(
"fetch expects a URL string".to_string(),
));
}
}; };
smol::block_on(async { smol::block_on(async {
@ -31,7 +35,11 @@ pub fn fetch(args: &[Value]) -> Result<Value, EvalError> {
pub fn fetch_json(args: &[Value]) -> Result<Value, EvalError> { pub fn fetch_json(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() { let url = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("fetch_json expects a URL string".to_string())), _ => {
return Err(EvalError::TypeError(
"fetch_json expects a URL string".to_string(),
));
}
}; };
smol::block_on(async { smol::block_on(async {
@ -51,7 +59,11 @@ pub fn fetch_json(args: &[Value]) -> Result<Value, EvalError> {
pub fn fetch_bytes(args: &[Value]) -> Result<Value, EvalError> { pub fn fetch_bytes(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() { let url = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("fetch_bytes expects a URL string".to_string())), _ => {
return Err(EvalError::TypeError(
"fetch_bytes expects a URL string".to_string(),
));
}
}; };
smol::block_on(async { smol::block_on(async {
@ -72,7 +84,11 @@ pub fn fetch_bytes(args: &[Value]) -> Result<Value, EvalError> {
pub fn post(args: &[Value]) -> Result<Value, EvalError> { pub fn post(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() { let url = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("post expects a URL string".to_string())), _ => {
return Err(EvalError::TypeError(
"post expects a URL string".to_string(),
));
}
}; };
let body = match args.get(1) { let body = match args.get(1) {
@ -98,7 +114,11 @@ pub fn post(args: &[Value]) -> Result<Value, EvalError> {
pub fn post_json(args: &[Value]) -> Result<Value, EvalError> { pub fn post_json(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() { let url = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("post_json expects a URL string".to_string())), _ => {
return Err(EvalError::TypeError(
"post_json expects a URL string".to_string(),
));
}
}; };
let data = args.get(1).unwrap_or(&Value::None); let data = args.get(1).unwrap_or(&Value::None);
@ -123,13 +143,21 @@ pub fn post_json(args: &[Value]) -> Result<Value, EvalError> {
pub fn download(args: &[Value]) -> Result<Value, EvalError> { pub fn download(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() { let url = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("download expects a URL string".to_string())), _ => {
return Err(EvalError::TypeError(
"download expects a URL string".to_string(),
));
}
}; };
let path = match args.get(1) { let path = match args.get(1) {
Some(Value::Path(p)) => p.clone(), Some(Value::Path(p)) => p.clone(),
Some(Value::Str(s)) => std::path::PathBuf::from(s), Some(Value::Str(s)) => std::path::PathBuf::from(s),
_ => return Err(EvalError::TypeError("download requires destination path".to_string())), _ => {
return Err(EvalError::TypeError(
"download requires destination path".to_string(),
));
}
}; };
smol::block_on(async { smol::block_on(async {
@ -161,9 +189,7 @@ fn json_to_value(json: &serde_json::Value) -> Value {
} }
} }
serde_json::Value::String(s) => Value::Str(s.clone()), serde_json::Value::String(s) => Value::Str(s.clone()),
serde_json::Value::Array(arr) => { serde_json::Value::Array(arr) => Value::List(arr.iter().map(json_to_value).collect()),
Value::List(arr.iter().map(json_to_value).collect())
}
serde_json::Value::Object(obj) => { serde_json::Value::Object(obj) => {
let fields: indexmap::IndexMap<String, Value> = obj let fields: indexmap::IndexMap<String, Value> = obj
.iter() .iter()
@ -183,9 +209,7 @@ fn value_to_json(val: &Value) -> serde_json::Value {
Value::Str(s) => serde_json::Value::String(s.clone()), Value::Str(s) => serde_json::Value::String(s.clone()),
Value::Bool(b) => serde_json::Value::Bool(*b), Value::Bool(b) => serde_json::Value::Bool(*b),
Value::Path(p) => serde_json::Value::String(p.display().to_string()), Value::Path(p) => serde_json::Value::String(p.display().to_string()),
Value::List(items) => { Value::List(items) => serde_json::Value::Array(items.iter().map(value_to_json).collect()),
serde_json::Value::Array(items.iter().map(value_to_json).collect())
}
Value::Struct(_, fields) => { Value::Struct(_, fields) => {
let map: serde_json::Map<String, serde_json::Value> = fields let map: serde_json::Map<String, serde_json::Value> = fields
.iter() .iter()

View file

@ -4,8 +4,17 @@ use crate::evaluator::{EvalError, Evaluator, Value};
pub fn map(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> { pub fn map(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
let list = match args.first() { let list = match args.first() {
Some(Value::List(items)) => items.clone(), Some(Value::List(items)) => items.clone(),
Some(v) => return Err(EvalError::TypeError(format!("map expects list, got {}", v.type_name()))), Some(v) => {
None => return Err(EvalError::TypeError("map requires a list argument".to_string())), return Err(EvalError::TypeError(format!(
"map expects list, got {}",
v.type_name()
)));
}
None => {
return Err(EvalError::TypeError(
"map requires a list argument".to_string(),
));
}
}; };
match args.get(1) { match args.get(1) {
@ -34,11 +43,24 @@ pub fn map(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<
} }
} }
pub fn filter(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> { pub fn filter(
eval: &mut Evaluator,
args: &[Value],
_arg_exprs: &[Expr],
) -> Result<Value, EvalError> {
let list = match args.first() { let list = match args.first() {
Some(Value::List(items)) => items.clone(), Some(Value::List(items)) => items.clone(),
Some(v) => return Err(EvalError::TypeError(format!("filter expects list, got {}", v.type_name()))), Some(v) => {
None => return Err(EvalError::TypeError("filter requires a list argument".to_string())), return Err(EvalError::TypeError(format!(
"filter expects list, got {}",
v.type_name()
)));
}
None => {
return Err(EvalError::TypeError(
"filter requires a list argument".to_string(),
));
}
}; };
match args.get(1) { match args.get(1) {
@ -60,22 +82,33 @@ pub fn filter(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Resu
Some(Value::Function(func, func_env)) => { Some(Value::Function(func, func_env)) => {
let mut results = Vec::new(); let mut results = Vec::new();
for item in list { for item in list {
let result = eval.call_fn(func, func_env, &[item.clone()])?; let result = eval.call_fn(func, func_env, std::slice::from_ref(&item))?;
if result.is_truthy() { if result.is_truthy() {
results.push(item); results.push(item);
} }
} }
Ok(Value::List(results)) Ok(Value::List(results))
} }
_ => Err(EvalError::TypeError("filter requires a function".to_string())), _ => Err(EvalError::TypeError(
"filter requires a function".to_string(),
)),
} }
} }
pub fn fold(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> { pub fn fold(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
let list = match args.first() { let list = match args.first() {
Some(Value::List(items)) => items.clone(), Some(Value::List(items)) => items.clone(),
Some(v) => return Err(EvalError::TypeError(format!("fold expects list, got {}", v.type_name()))), Some(v) => {
None => return Err(EvalError::TypeError("fold requires a list argument".to_string())), return Err(EvalError::TypeError(format!(
"fold expects list, got {}",
v.type_name()
)));
}
None => {
return Err(EvalError::TypeError(
"fold requires a list argument".to_string(),
));
}
}; };
let init = args.get(1).cloned().unwrap_or(Value::None); let init = args.get(1).cloned().unwrap_or(Value::None);
@ -136,13 +169,18 @@ pub fn concat(args: &[Value]) -> Result<Value, EvalError> {
pub fn zip(args: &[Value]) -> Result<Value, EvalError> { pub fn zip(args: &[Value]) -> Result<Value, EvalError> {
if args.len() < 2 { if args.len() < 2 {
return Err(EvalError::TypeError("zip requires at least 2 lists".to_string())); return Err(EvalError::TypeError(
"zip requires at least 2 lists".to_string(),
));
} }
let lists: Result<Vec<&Vec<Value>>, _> = args.iter().map(|a| match a { let lists: Result<Vec<&Vec<Value>>, _> = args
Value::List(items) => Ok(items), .iter()
_ => Err(EvalError::TypeError("zip expects lists".to_string())), .map(|a| match a {
}).collect(); Value::List(items) => Ok(items),
_ => Err(EvalError::TypeError("zip expects lists".to_string())),
})
.collect();
let lists = lists?; let lists = lists?;
let min_len = lists.iter().map(|l| l.len()).min().unwrap_or(0); let min_len = lists.iter().map(|l| l.len()).min().unwrap_or(0);
@ -189,7 +227,9 @@ pub fn len(args: &[Value]) -> Result<Value, EvalError> {
match args.first() { match args.first() {
Some(Value::List(items)) => Ok(Value::Int(items.len() as i64)), Some(Value::List(items)) => Ok(Value::Int(items.len() as i64)),
Some(Value::Str(s)) => Ok(Value::Int(s.len() as i64)), Some(Value::Str(s)) => Ok(Value::Int(s.len() as i64)),
_ => Err(EvalError::TypeError("len expects a list or string".to_string())), _ => Err(EvalError::TypeError(
"len expects a list or string".to_string(),
)),
} }
} }
@ -245,7 +285,11 @@ pub fn sort(args: &[Value]) -> Result<Value, EvalError> {
Ok(Value::List(sortable.into_iter().map(|(v, _)| v).collect())) Ok(Value::List(sortable.into_iter().map(|(v, _)| v).collect()))
} }
pub fn sort_by(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> { pub fn sort_by(
eval: &mut Evaluator,
args: &[Value],
_arg_exprs: &[Expr],
) -> Result<Value, EvalError> {
let list = match args.first() { let list = match args.first() {
Some(Value::List(items)) => items.clone(), Some(Value::List(items)) => items.clone(),
_ => return Err(EvalError::TypeError("sort_by expects a list".to_string())), _ => return Err(EvalError::TypeError("sort_by expects a list".to_string())),
@ -266,7 +310,9 @@ pub fn sort_by(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Res
keyed.sort_by(|a, b| a.1.cmp(&b.1)); keyed.sort_by(|a, b| a.1.cmp(&b.1));
Ok(Value::List(keyed.into_iter().map(|(v, _)| v).collect())) Ok(Value::List(keyed.into_iter().map(|(v, _)| v).collect()))
} }
_ => Err(EvalError::TypeError("sort_by requires a function".to_string())), _ => Err(EvalError::TypeError(
"sort_by requires a function".to_string(),
)),
} }
} }
@ -305,7 +351,11 @@ pub fn seq(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<
} }
} }
pub fn batch(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> { pub fn batch(
eval: &mut Evaluator,
args: &[Value],
_arg_exprs: &[Expr],
) -> Result<Value, EvalError> {
let list = match args.first() { let list = match args.first() {
Some(Value::List(items)) => items.clone(), Some(Value::List(items)) => items.clone(),
_ => return Err(EvalError::TypeError("batch expects a list".to_string())), _ => return Err(EvalError::TypeError("batch expects a list".to_string())),
@ -313,7 +363,11 @@ pub fn batch(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Resul
let batch_size = match args.get(1) { let batch_size = match args.get(1) {
Some(Value::Int(n)) => *n as usize, Some(Value::Int(n)) => *n as usize,
_ => return Err(EvalError::TypeError("batch requires batch size".to_string())), _ => {
return Err(EvalError::TypeError(
"batch requires batch size".to_string(),
));
}
}; };
match args.get(2) { match args.get(2) {
@ -332,7 +386,9 @@ pub fn batch(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Resul
} }
Ok(Value::List(results)) Ok(Value::List(results))
} }
_ => Err(EvalError::TypeError("batch requires a function".to_string())), _ => Err(EvalError::TypeError(
"batch requires a function".to_string(),
)),
} }
} }

View file

@ -16,7 +16,11 @@ pub fn hash_file(args: &[Value]) -> Result<Value, EvalError> {
pub fn hash_str(args: &[Value]) -> Result<Value, EvalError> { pub fn hash_str(args: &[Value]) -> Result<Value, EvalError> {
let s = match args.first() { let s = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("hash_str expects a string".to_string())), _ => {
return Err(EvalError::TypeError(
"hash_str expects a string".to_string(),
));
}
}; };
let hash = blake3::hash(s.as_bytes()); let hash = blake3::hash(s.as_bytes());
@ -26,12 +30,20 @@ pub fn hash_str(args: &[Value]) -> Result<Value, EvalError> {
pub fn encrypt_age(args: &[Value]) -> Result<Value, EvalError> { pub fn encrypt_age(args: &[Value]) -> Result<Value, EvalError> {
let content = match args.first() { let content = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("encrypt_age expects content string".to_string())), _ => {
return Err(EvalError::TypeError(
"encrypt_age expects content string".to_string(),
));
}
}; };
let recipient = match args.get(1) { let recipient = match args.get(1) {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("encrypt_age requires recipient public key".to_string())), _ => {
return Err(EvalError::TypeError(
"encrypt_age requires recipient public key".to_string(),
));
}
}; };
let recipient = recipient let recipient = recipient
@ -60,12 +72,20 @@ pub fn encrypt_age(args: &[Value]) -> Result<Value, EvalError> {
pub fn decrypt_age(args: &[Value]) -> Result<Value, EvalError> { pub fn decrypt_age(args: &[Value]) -> Result<Value, EvalError> {
let encrypted = match args.first() { let encrypted = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("decrypt_age expects encrypted string".to_string())), _ => {
return Err(EvalError::TypeError(
"decrypt_age expects encrypted string".to_string(),
));
}
}; };
let identity_str = match args.get(1) { let identity_str = match args.get(1) {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("decrypt_age requires identity".to_string())), _ => {
return Err(EvalError::TypeError(
"decrypt_age requires identity".to_string(),
));
}
}; };
let identity = identity_str let identity = identity_str
@ -79,7 +99,11 @@ pub fn decrypt_age(args: &[Value]) -> Result<Value, EvalError> {
.map_err(|e| EvalError::TypeError(format!("decryption error: {}", e)))? .map_err(|e| EvalError::TypeError(format!("decryption error: {}", e)))?
{ {
age::Decryptor::Recipients(d) => d, age::Decryptor::Recipients(d) => d,
_ => return Err(EvalError::TypeError("unexpected decryptor type".to_string())), _ => {
return Err(EvalError::TypeError(
"unexpected decryptor type".to_string(),
));
}
}; };
let mut decrypted = vec![]; let mut decrypted = vec![];
@ -92,10 +116,9 @@ pub fn decrypt_age(args: &[Value]) -> Result<Value, EvalError> {
.read_to_end(&mut decrypted) .read_to_end(&mut decrypted)
.map_err(|e| EvalError::TypeError(format!("decryption error: {}", e)))?; .map_err(|e| EvalError::TypeError(format!("decryption error: {}", e)))?;
Ok(Value::Str( Ok(Value::Str(String::from_utf8(decrypted).map_err(|e| {
String::from_utf8(decrypted) EvalError::TypeError(format!("invalid UTF-8: {}", e))
.map_err(|e| EvalError::TypeError(format!("invalid UTF-8: {}", e)))?, })?))
))
} }
fn base64_encode(data: &[u8]) -> String { fn base64_encode(data: &[u8]) -> String {

View file

@ -20,7 +20,11 @@ pub fn write_file(args: &[Value]) -> Result<Value, EvalError> {
let path = get_path(args)?; let path = get_path(args)?;
let content = match args.get(1) { let content = match args.get(1) {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("write_file requires content string".to_string())), _ => {
return Err(EvalError::TypeError(
"write_file requires content string".to_string(),
));
}
}; };
std::fs::write(&path, content)?; std::fs::write(&path, content)?;
Ok(Value::Bool(true)) Ok(Value::Bool(true))
@ -31,7 +35,11 @@ pub fn copy_file(args: &[Value]) -> Result<Value, EvalError> {
let dst = match args.get(1) { let dst = match args.get(1) {
Some(Value::Path(p)) => p.clone(), Some(Value::Path(p)) => p.clone(),
Some(Value::Str(s)) => expand_path(s), Some(Value::Str(s)) => expand_path(s),
_ => return Err(EvalError::TypeError("copy_file requires destination path".to_string())), _ => {
return Err(EvalError::TypeError(
"copy_file requires destination path".to_string(),
));
}
}; };
std::fs::copy(&src, &dst)?; std::fs::copy(&src, &dst)?;
Ok(Value::Bool(true)) Ok(Value::Bool(true))
@ -71,7 +79,11 @@ pub fn list_dir(args: &[Value]) -> Result<Value, EvalError> {
pub fn glob_files(args: &[Value]) -> Result<Value, EvalError> { pub fn glob_files(args: &[Value]) -> Result<Value, EvalError> {
let pattern = match args.first() { let pattern = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("glob expects a pattern string".to_string())), _ => {
return Err(EvalError::TypeError(
"glob expects a pattern string".to_string(),
));
}
}; };
let entries: Vec<Value> = glob::glob(pattern) let entries: Vec<Value> = glob::glob(pattern)
@ -127,7 +139,11 @@ pub fn path_join(args: &[Value]) -> Result<Value, EvalError> {
match arg { match arg {
Value::Path(p) => result.push(p), Value::Path(p) => result.push(p),
Value::Str(s) => result.push(s), Value::Str(s) => result.push(s),
_ => return Err(EvalError::TypeError("path_join expects paths or strings".to_string())), _ => {
return Err(EvalError::TypeError(
"path_join expects paths or strings".to_string(),
));
}
} }
} }
Ok(Value::Path(result)) Ok(Value::Path(result))
@ -135,7 +151,9 @@ pub fn path_join(args: &[Value]) -> Result<Value, EvalError> {
pub fn path_parent(args: &[Value]) -> Result<Value, EvalError> { pub fn path_parent(args: &[Value]) -> Result<Value, EvalError> {
let path = get_path(args)?; let path = get_path(args)?;
Ok(Value::Path(path.parent().map(|p| p.to_path_buf()).unwrap_or_default())) Ok(Value::Path(
path.parent().map(|p| p.to_path_buf()).unwrap_or_default(),
))
} }
pub fn path_filename(args: &[Value]) -> Result<Value, EvalError> { pub fn path_filename(args: &[Value]) -> Result<Value, EvalError> {
@ -167,7 +185,11 @@ pub fn config_dir() -> Result<Value, EvalError> {
pub fn config_path(args: &[Value]) -> Result<Value, EvalError> { pub fn config_path(args: &[Value]) -> Result<Value, EvalError> {
let app = match args.first() { let app = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("config_path expects an app name string".to_string())), _ => {
return Err(EvalError::TypeError(
"config_path expects an app name string".to_string(),
));
}
}; };
let config = dirs::config_dir().unwrap_or_default(); let config = dirs::config_dir().unwrap_or_default();
Ok(Value::Path(config.join(app))) Ok(Value::Path(config.join(app)))
@ -184,27 +206,31 @@ pub fn cache_dir() -> Result<Value, EvalError> {
pub fn exec(args: &[Value]) -> Result<Value, EvalError> { pub fn exec(args: &[Value]) -> Result<Value, EvalError> {
let cmd = match args.first() { let cmd = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("exec expects a command string".to_string())), _ => {
return Err(EvalError::TypeError(
"exec expects a command string".to_string(),
));
}
}; };
let output = Command::new("sh") let output = Command::new("sh").arg("-c").arg(cmd).output()?;
.arg("-c")
.arg(cmd)
.output()?;
Ok(Value::Str(String::from_utf8_lossy(&output.stdout).to_string())) Ok(Value::Str(
String::from_utf8_lossy(&output.stdout).to_string(),
))
} }
pub fn exec_with_status(args: &[Value]) -> Result<Value, EvalError> { pub fn exec_with_status(args: &[Value]) -> Result<Value, EvalError> {
let cmd = match args.first() { let cmd = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("exec_with_status expects a command string".to_string())), _ => {
return Err(EvalError::TypeError(
"exec_with_status expects a command string".to_string(),
));
}
}; };
let status = Command::new("sh") let status = Command::new("sh").arg("-c").arg(cmd).status()?;
.arg("-c")
.arg(cmd)
.status()?;
Ok(Value::Int(status.code().unwrap_or(-1) as i64)) Ok(Value::Int(status.code().unwrap_or(-1) as i64))
} }
@ -216,12 +242,14 @@ pub fn shell(args: &[Value]) -> Result<Value, EvalError> {
pub fn which(args: &[Value]) -> Result<Value, EvalError> { pub fn which(args: &[Value]) -> Result<Value, EvalError> {
let cmd = match args.first() { let cmd = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("which expects a command name".to_string())), _ => {
return Err(EvalError::TypeError(
"which expects a command name".to_string(),
));
}
}; };
let output = Command::new("which") let output = Command::new("which").arg(cmd).output()?;
.arg(cmd)
.output()?;
if output.status.success() { if output.status.success() {
let path = String::from_utf8_lossy(&output.stdout).trim().to_string(); let path = String::from_utf8_lossy(&output.stdout).trim().to_string();
@ -240,7 +268,11 @@ pub fn to_json(args: &[Value]) -> Result<Value, EvalError> {
pub fn from_json(args: &[Value]) -> Result<Value, EvalError> { pub fn from_json(args: &[Value]) -> Result<Value, EvalError> {
let s = match args.first() { let s = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("from_json expects a string".to_string())), _ => {
return Err(EvalError::TypeError(
"from_json expects a string".to_string(),
));
}
}; };
let json: serde_json::Value = serde_json::from_str(s) let json: serde_json::Value = serde_json::from_str(s)
@ -260,11 +292,15 @@ pub fn to_toml(args: &[Value]) -> Result<Value, EvalError> {
pub fn from_toml(args: &[Value]) -> Result<Value, EvalError> { pub fn from_toml(args: &[Value]) -> Result<Value, EvalError> {
let s = match args.first() { let s = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("from_toml expects a string".to_string())), _ => {
return Err(EvalError::TypeError(
"from_toml expects a string".to_string(),
));
}
}; };
let toml_val: toml::Value = toml::from_str(s) let toml_val: toml::Value =
.map_err(|e| EvalError::TypeError(format!("invalid TOML: {}", e)))?; toml::from_str(s).map_err(|e| EvalError::TypeError(format!("invalid TOML: {}", e)))?;
Ok(toml_to_value(&toml_val)) Ok(toml_to_value(&toml_val))
} }
@ -272,7 +308,9 @@ pub fn from_toml(args: &[Value]) -> Result<Value, EvalError> {
pub fn to_yaml(args: &[Value]) -> Result<Value, EvalError> { pub fn to_yaml(args: &[Value]) -> Result<Value, EvalError> {
let val = args.first().unwrap_or(&Value::None); let val = args.first().unwrap_or(&Value::None);
let json = value_to_json(val); let json = value_to_json(val);
Ok(Value::Str(serde_json::to_string_pretty(&json).unwrap_or_default())) Ok(Value::Str(
serde_json::to_string_pretty(&json).unwrap_or_default(),
))
} }
pub fn from_yaml(args: &[Value]) -> Result<Value, EvalError> { pub fn from_yaml(args: &[Value]) -> Result<Value, EvalError> {
@ -288,9 +326,9 @@ fn get_path(args: &[Value]) -> Result<PathBuf, EvalError> {
} }
fn expand_path(s: &str) -> PathBuf { fn expand_path(s: &str) -> PathBuf {
if s.starts_with('~') { if let Some(stripped) = s.strip_prefix('~') {
let home = dirs::home_dir().unwrap_or_default(); let home = dirs::home_dir().unwrap_or_default();
home.join(s.strip_prefix("~/").unwrap_or(&s[1..])) home.join(stripped.strip_prefix('/').unwrap_or(stripped))
} else { } else {
PathBuf::from(s) PathBuf::from(s)
} }
@ -314,9 +352,7 @@ fn value_to_json(val: &Value) -> serde_json::Value {
Value::Str(s) => serde_json::Value::String(s.clone()), Value::Str(s) => serde_json::Value::String(s.clone()),
Value::Bool(b) => serde_json::Value::Bool(*b), Value::Bool(b) => serde_json::Value::Bool(*b),
Value::Path(p) => serde_json::Value::String(p.display().to_string()), Value::Path(p) => serde_json::Value::String(p.display().to_string()),
Value::List(items) => { Value::List(items) => serde_json::Value::Array(items.iter().map(value_to_json).collect()),
serde_json::Value::Array(items.iter().map(value_to_json).collect())
}
Value::Struct(_, fields) => { Value::Struct(_, fields) => {
let map: serde_json::Map<String, serde_json::Value> = fields let map: serde_json::Map<String, serde_json::Value> = fields
.iter() .iter()
@ -343,9 +379,7 @@ fn json_to_value(json: &serde_json::Value) -> Value {
} }
} }
serde_json::Value::String(s) => Value::Str(s.clone()), serde_json::Value::String(s) => Value::Str(s.clone()),
serde_json::Value::Array(arr) => { serde_json::Value::Array(arr) => Value::List(arr.iter().map(json_to_value).collect()),
Value::List(arr.iter().map(json_to_value).collect())
}
serde_json::Value::Object(obj) => { serde_json::Value::Object(obj) => {
let fields: indexmap::IndexMap<String, Value> = obj let fields: indexmap::IndexMap<String, Value> = obj
.iter() .iter()
@ -363,9 +397,7 @@ fn value_to_toml(val: &Value) -> toml::Value {
Value::Str(s) => toml::Value::String(s.clone()), Value::Str(s) => toml::Value::String(s.clone()),
Value::Bool(b) => toml::Value::Boolean(*b), Value::Bool(b) => toml::Value::Boolean(*b),
Value::Path(p) => toml::Value::String(p.display().to_string()), Value::Path(p) => toml::Value::String(p.display().to_string()),
Value::List(items) => { Value::List(items) => toml::Value::Array(items.iter().map(value_to_toml).collect()),
toml::Value::Array(items.iter().map(value_to_toml).collect())
}
Value::Struct(_, fields) => { Value::Struct(_, fields) => {
let map: toml::map::Map<String, toml::Value> = fields let map: toml::map::Map<String, toml::Value> = fields
.iter() .iter()
@ -383,9 +415,7 @@ fn toml_to_value(toml: &toml::Value) -> Value {
toml::Value::Integer(i) => Value::Int(*i), toml::Value::Integer(i) => Value::Int(*i),
toml::Value::Float(f) => Value::Float(*f), toml::Value::Float(f) => Value::Float(*f),
toml::Value::String(s) => Value::Str(s.clone()), toml::Value::String(s) => Value::Str(s.clone()),
toml::Value::Array(arr) => { toml::Value::Array(arr) => Value::List(arr.iter().map(toml_to_value).collect()),
Value::List(arr.iter().map(toml_to_value).collect())
}
toml::Value::Table(table) => { toml::Value::Table(table) => {
let fields: indexmap::IndexMap<String, Value> = table let fields: indexmap::IndexMap<String, Value> = table
.iter() .iter()

View file

@ -146,22 +146,31 @@ pub fn call_method(
} }
} }
"map" => { "map" => {
let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>(); let all_args = std::iter::once(obj.clone())
.chain(args.iter().cloned())
.collect::<Vec<_>>();
collections::map(eval, &all_args, arg_exprs) collections::map(eval, &all_args, arg_exprs)
} }
"filter" => { "filter" => {
let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>(); let all_args = std::iter::once(obj.clone())
.chain(args.iter().cloned())
.collect::<Vec<_>>();
collections::filter(eval, &all_args, arg_exprs) collections::filter(eval, &all_args, arg_exprs)
} }
"fold" => { "fold" => {
let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>(); let all_args = std::iter::once(obj.clone())
.chain(args.iter().cloned())
.collect::<Vec<_>>();
collections::fold(eval, &all_args, arg_exprs) collections::fold(eval, &all_args, arg_exprs)
} }
"join" => { "join" => {
let sep = args.first().map(|v| match v { let sep = args
Value::Str(s) => s.as_str(), .first()
_ => "", .map(|v| match v {
}).unwrap_or(""); Value::Str(s) => s.as_str(),
_ => "",
})
.unwrap_or("");
let result = items let result = items
.iter() .iter()
.map(|v| v.to_string_repr()) .map(|v| v.to_string_repr())
@ -170,7 +179,9 @@ pub fn call_method(
Ok(Value::Str(result)) Ok(Value::Str(result))
} }
"sort" => { "sort" => {
let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>(); let all_args = std::iter::once(obj.clone())
.chain(args.iter().cloned())
.collect::<Vec<_>>();
collections::sort(&all_args) collections::sort(&all_args)
} }
"reverse" => { "reverse" => {
@ -179,7 +190,9 @@ pub fn call_method(
Ok(Value::List(reversed)) Ok(Value::List(reversed))
} }
"unique" => { "unique" => {
let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>(); let all_args = std::iter::once(obj.clone())
.chain(args.iter().cloned())
.collect::<Vec<_>>();
collections::unique(&all_args) collections::unique(&all_args)
} }
_ => Err(EvalError::UndefinedFunction(format!("list.{}", method))), _ => Err(EvalError::UndefinedFunction(format!("list.{}", method))),
@ -191,19 +204,21 @@ pub fn call_method(
"lower" => Ok(Value::Str(s.to_lowercase())), "lower" => Ok(Value::Str(s.to_lowercase())),
"trim" => Ok(Value::Str(s.trim().to_string())), "trim" => Ok(Value::Str(s.trim().to_string())),
"split" => { "split" => {
let sep = args.first().map(|v| match v { let sep = args
Value::Str(s) => s.as_str(), .first()
_ => " ", .map(|v| match v {
}).unwrap_or(" "); Value::Str(s) => s.as_str(),
_ => " ",
})
.unwrap_or(" ");
let parts: Vec<Value> = s.split(sep).map(|p| Value::Str(p.to_string())).collect(); let parts: Vec<Value> = s.split(sep).map(|p| Value::Str(p.to_string())).collect();
Ok(Value::List(parts)) Ok(Value::List(parts))
} }
"replace" => { "replace" => {
if args.len() >= 2 { if args.len() >= 2
if let (Value::Str(from), Value::Str(to)) = (&args[0], &args[1]) { && let (Value::Str(from), Value::Str(to)) = (&args[0], &args[1]) {
return Ok(Value::Str(s.replace(from, to))); return Ok(Value::Str(s.replace(from, to)));
} }
}
Ok(Value::Str(s.clone())) Ok(Value::Str(s.clone()))
} }
"starts_with" => { "starts_with" => {
@ -231,9 +246,19 @@ pub fn call_method(
}, },
Value::Path(p) => match method { Value::Path(p) => match method {
"parent" => Ok(Value::Path(p.parent().map(|p| p.to_path_buf()).unwrap_or_default())), "parent" => Ok(Value::Path(
"filename" => Ok(Value::Str(p.file_name().map(|s| s.to_string_lossy().to_string()).unwrap_or_default())), p.parent().map(|p| p.to_path_buf()).unwrap_or_default(),
"extension" => Ok(Value::Str(p.extension().map(|s| s.to_string_lossy().to_string()).unwrap_or_default())), )),
"filename" => Ok(Value::Str(
p.file_name()
.map(|s| s.to_string_lossy().to_string())
.unwrap_or_default(),
)),
"extension" => Ok(Value::Str(
p.extension()
.map(|s| s.to_string_lossy().to_string())
.unwrap_or_default(),
)),
"exists" => Ok(Value::Bool(p.exists())), "exists" => Ok(Value::Bool(p.exists())),
"is_file" => Ok(Value::Bool(p.is_file())), "is_file" => Ok(Value::Bool(p.is_file())),
"is_dir" => Ok(Value::Bool(p.is_dir())), "is_dir" => Ok(Value::Bool(p.is_dir())),
@ -256,15 +281,14 @@ pub fn call_method(
let mut method_args = vec![obj.clone()]; let mut method_args = vec![obj.clone()];
method_args.extend(args.iter().cloned()); method_args.extend(args.iter().cloned());
let env_clone = eval.env().clone(); let env_clone = eval.env().clone();
return eval.call_function(&m, &env_clone, &method_args); return eval.call_function(m, &env_clone, &method_args);
} }
} }
} }
if let Some(field) = fields.get(method) { if let Some(field) = fields.get(method)
if let Value::Function(func, env) = field { && let Value::Function(func, env) = field {
return eval.call_function(func, env, args); return eval.call_function(func, env, args);
} }
}
Err(EvalError::FieldNotFound { Err(EvalError::FieldNotFound {
ty: name.clone(), ty: name.clone(),
field: method.to_string(), field: method.to_string(),
@ -295,7 +319,9 @@ fn options_unwrap(args: &[Value]) -> Result<Value, EvalError> {
match args.first() { match args.first() {
Some(Value::None) => Err(EvalError::TypeError("unwrap called on none".to_string())), Some(Value::None) => Err(EvalError::TypeError("unwrap called on none".to_string())),
Some(v) => Ok(v.clone()), Some(v) => Ok(v.clone()),
None => Err(EvalError::TypeError("unwrap requires an argument".to_string())), None => Err(EvalError::TypeError(
"unwrap requires an argument".to_string(),
)),
} }
} }
@ -308,18 +334,22 @@ fn options_unwrap_or(args: &[Value]) -> Result<Value, EvalError> {
} }
fn options_is_some(args: &[Value]) -> Result<Value, EvalError> { fn options_is_some(args: &[Value]) -> Result<Value, EvalError> {
Ok(Value::Bool(!matches!(args.first(), Some(Value::None) | None))) Ok(Value::Bool(!matches!(
args.first(),
Some(Value::None) | None
)))
} }
fn options_is_none(args: &[Value]) -> Result<Value, EvalError> { fn options_is_none(args: &[Value]) -> Result<Value, EvalError> {
Ok(Value::Bool(matches!(args.first(), Some(Value::None) | None))) Ok(Value::Bool(matches!(
args.first(),
Some(Value::None) | None
)))
} }
fn env_get(args: &[Value]) -> Result<Value, EvalError> { fn env_get(args: &[Value]) -> Result<Value, EvalError> {
if let Some(Value::Str(key)) = args.first() { if let Some(Value::Str(key)) = args.first() {
Ok(std::env::var(key) Ok(std::env::var(key).map(Value::Str).unwrap_or(Value::None))
.map(Value::Str)
.unwrap_or(Value::None))
} else { } else {
Ok(Value::None) Ok(Value::None)
} }

View file

@ -64,12 +64,20 @@ pub fn replace(args: &[Value]) -> Result<Value, EvalError> {
let from = match args.get(1) { let from = match args.get(1) {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("replace requires from string".to_string())), _ => {
return Err(EvalError::TypeError(
"replace requires from string".to_string(),
));
}
}; };
let to = match args.get(2) { let to = match args.get(2) {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("replace requires to string".to_string())), _ => {
return Err(EvalError::TypeError(
"replace requires to string".to_string(),
));
}
}; };
Ok(Value::Str(s.replace(from.as_str(), to.as_str()))) Ok(Value::Str(s.replace(from.as_str(), to.as_str())))
@ -78,12 +86,20 @@ pub fn replace(args: &[Value]) -> Result<Value, EvalError> {
pub fn starts_with(args: &[Value]) -> Result<Value, EvalError> { pub fn starts_with(args: &[Value]) -> Result<Value, EvalError> {
let s = match args.first() { let s = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("starts_with expects a string".to_string())), _ => {
return Err(EvalError::TypeError(
"starts_with expects a string".to_string(),
));
}
}; };
let prefix = match args.get(1) { let prefix = match args.get(1) {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("starts_with requires prefix".to_string())), _ => {
return Err(EvalError::TypeError(
"starts_with requires prefix".to_string(),
));
}
}; };
Ok(Value::Bool(s.starts_with(prefix.as_str()))) Ok(Value::Bool(s.starts_with(prefix.as_str())))
@ -92,12 +108,20 @@ pub fn starts_with(args: &[Value]) -> Result<Value, EvalError> {
pub fn ends_with(args: &[Value]) -> Result<Value, EvalError> { pub fn ends_with(args: &[Value]) -> Result<Value, EvalError> {
let s = match args.first() { let s = match args.first() {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("ends_with expects a string".to_string())), _ => {
return Err(EvalError::TypeError(
"ends_with expects a string".to_string(),
));
}
}; };
let suffix = match args.get(1) { let suffix = match args.get(1) {
Some(Value::Str(s)) => s, Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("ends_with requires suffix".to_string())), _ => {
return Err(EvalError::TypeError(
"ends_with requires suffix".to_string(),
));
}
}; };
Ok(Value::Bool(s.ends_with(suffix.as_str()))) Ok(Value::Bool(s.ends_with(suffix.as_str())))
@ -106,7 +130,11 @@ pub fn ends_with(args: &[Value]) -> Result<Value, EvalError> {
pub fn format(args: &[Value]) -> Result<Value, EvalError> { pub fn format(args: &[Value]) -> Result<Value, EvalError> {
let template = match args.first() { let template = match args.first() {
Some(Value::Str(s)) => s.clone(), Some(Value::Str(s)) => s.clone(),
_ => return Err(EvalError::TypeError("format expects a template string".to_string())), _ => {
return Err(EvalError::TypeError(
"format expects a template string".to_string(),
));
}
}; };
let mut result = template; let mut result = template;

View file

@ -128,7 +128,11 @@ impl Value {
Value::Path(p) => p.display().to_string(), Value::Path(p) => p.display().to_string(),
Value::List(items) => { Value::List(items) => {
// Join list items with colon (PATH-style) // Join list items with colon (PATH-style)
items.iter().map(|v| v.to_env_string()).collect::<Vec<_>>().join(":") items
.iter()
.map(|v| v.to_env_string())
.collect::<Vec<_>>()
.join(":")
} }
Value::None => String::new(), Value::None => String::new(),
_ => self.to_string_repr(), _ => self.to_string_repr(),
@ -347,9 +351,18 @@ impl Evaluator {
EnumDecl { EnumDecl {
name: "Os".to_string(), name: "Os".to_string(),
variants: vec![ variants: vec![
EnumVariant { name: "Linux".to_string(), fields: None }, EnumVariant {
EnumVariant { name: "MacOS".to_string(), fields: None }, name: "Linux".to_string(),
EnumVariant { name: "Windows".to_string(), fields: None }, fields: None,
},
EnumVariant {
name: "MacOS".to_string(),
fields: None,
},
EnumVariant {
name: "Windows".to_string(),
fields: None,
},
], ],
}, },
); );
@ -365,7 +378,10 @@ impl Evaluator {
}; };
env.define("os".to_string(), os_val); env.define("os".to_string(), os_val);
env.define("distro".to_string(), Value::Str(sys.distro.clone())); env.define("distro".to_string(), Value::Str(sys.distro.clone()));
env.define("pkg_manager".to_string(), Value::Str(sys.pkg_manager.clone())); env.define(
"pkg_manager".to_string(),
Value::Str(sys.pkg_manager.clone()),
);
env.define("hostname".to_string(), Value::Str(sys.hostname.clone())); env.define("hostname".to_string(), Value::Str(sys.hostname.clone()));
env.define("arch".to_string(), Value::Str(sys.arch.to_string())); env.define("arch".to_string(), Value::Str(sys.arch.to_string()));
} }
@ -383,13 +399,17 @@ impl Evaluator {
let mut vars = self.env.get_all_variables(); let mut vars = self.env.get_all_variables();
// Add doot global variables // Add doot global variables
vars.insert("DOOT_HOME".to_string(), Self::home_dir().display().to_string()); vars.insert(
vars.insert("DOOT_CONFIG_DIR".to_string(), "DOOT_HOME".to_string(),
Self::home_dir().display().to_string(),
);
vars.insert(
"DOOT_CONFIG_DIR".to_string(),
dirs::config_dir() dirs::config_dir()
.unwrap_or_else(|| Self::home_dir().join(".config")) .unwrap_or_else(|| Self::home_dir().join(".config"))
.join("doot") .join("doot")
.display() .display()
.to_string() .to_string(),
); );
vars.insert("DOOT_OS".to_string(), std::env::consts::OS.to_string()); vars.insert("DOOT_OS".to_string(), std::env::consts::OS.to_string());
vars.insert("DOOT_ARCH".to_string(), std::env::consts::ARCH.to_string()); vars.insert("DOOT_ARCH".to_string(), std::env::consts::ARCH.to_string());
@ -403,11 +423,10 @@ impl Evaluator {
let value = self.eval_expr(&decl.value)?; let value = self.eval_expr(&decl.value)?;
// Handle special config variables // Handle special config variables
if decl.name == "sandbox" { if decl.name == "sandbox"
if let Value::Bool(b) = &value { && let Value::Bool(b) = &value {
self.result.sandbox = *b; self.result.sandbox = *b;
} }
}
self.env.define(decl.name.clone(), value); self.env.define(decl.name.clone(), value);
Ok(None) Ok(None)
@ -718,12 +737,10 @@ impl Evaluator {
Value::Lambda(params, body, lambda_env) => { Value::Lambda(params, body, lambda_env) => {
self.call_lambda(&params, &body, &lambda_env, &arg_vals) self.call_lambda(&params, &body, &lambda_env, &arg_vals)
} }
_ => { _ => Err(EvalError::TypeError(format!(
Err(EvalError::TypeError(format!( "cannot call {}",
"cannot call {}", callee_val.type_name()
callee_val.type_name() ))),
)))
}
} }
} }
@ -741,10 +758,13 @@ impl Evaluator {
let obj_val = self.eval_expr(obj)?; let obj_val = self.eval_expr(obj)?;
match obj_val { match obj_val {
Value::Struct(name, fields) => { Value::Struct(name, fields) => {
fields.get(field).cloned().ok_or_else(|| EvalError::FieldNotFound { fields
ty: name, .get(field)
field: field.clone(), .cloned()
}) .ok_or_else(|| EvalError::FieldNotFound {
ty: name,
field: field.clone(),
})
} }
_ => Err(EvalError::TypeError(format!( _ => Err(EvalError::TypeError(format!(
"cannot access field on {}", "cannot access field on {}",
@ -759,22 +779,17 @@ impl Evaluator {
match (obj_val, idx_val) { match (obj_val, idx_val) {
(Value::List(items), Value::Int(i)) => { (Value::List(items), Value::Int(i)) => {
let index = if i < 0 { let index = if i < 0 { items.len() as i64 + i } else { i };
items.len() as i64 + i items
} else { .get(index as usize)
i .cloned()
}; .ok_or(EvalError::IndexOutOfBounds {
items.get(index as usize).cloned().ok_or(EvalError::IndexOutOfBounds { index: i,
index: i, len: items.len(),
len: items.len(), })
})
} }
(Value::Str(s), Value::Int(i)) => { (Value::Str(s), Value::Int(i)) => {
let index = if i < 0 { let index = if i < 0 { s.len() as i64 + i } else { i };
s.len() as i64 + i
} else {
i
};
s.chars() s.chars()
.nth(index as usize) .nth(index as usize)
.map(|c| Value::Str(c.to_string())) .map(|c| Value::Str(c.to_string()))
@ -815,9 +830,7 @@ impl Evaluator {
Ok(Value::Struct(name.clone(), values)) Ok(Value::Struct(name.clone(), values))
} }
Expr::EnumVariant(ty, variant) => { Expr::EnumVariant(ty, variant) => Ok(Value::Enum(ty.clone(), variant.clone())),
Ok(Value::Enum(ty.clone(), variant.clone()))
}
Expr::If(cond, then_expr, else_expr) => { Expr::If(cond, then_expr, else_expr) => {
let cond_val = self.eval_expr(cond)?; let cond_val = self.eval_expr(cond)?;
@ -830,13 +843,13 @@ impl Evaluator {
} }
} }
Expr::Lambda(params, body, ..) => { Expr::Lambda(params, body, ..) => Ok(Value::Lambda(
Ok(Value::Lambda(params.clone(), *body.clone(), self.env.clone())) params.clone(),
} *body.clone(),
self.env.clone(),
)),
Expr::Await(expr) => { Expr::Await(expr) => self.eval_expr(expr),
self.eval_expr(expr)
}
Expr::Path(left, right) => { Expr::Path(left, right) => {
let left_path = self.eval_to_path(left)?; let left_path = self.eval_to_path(left)?;
@ -1126,9 +1139,9 @@ impl Evaluator {
match val { match val {
Value::Path(p) => Ok(p), Value::Path(p) => Ok(p),
Value::Str(s) => { Value::Str(s) => {
if s.starts_with('~') { if let Some(stripped) = s.strip_prefix('~') {
let home = Self::home_dir(); let home = Self::home_dir();
Ok(home.join(s.strip_prefix("~/").unwrap_or(&s[1..]))) Ok(home.join(stripped.strip_prefix('/').unwrap_or(stripped)))
} else { } else {
Ok(PathBuf::from(s)) Ok(PathBuf::from(s))
} }
@ -1208,7 +1221,9 @@ fn command_exists(cmd: &str) -> bool {
} else { } else {
// Fallback to hardcoded paths // Fallback to hardcoded paths
let paths = ["/usr/bin/", "/usr/local/bin/", "/bin/"]; let paths = ["/usr/bin/", "/usr/local/bin/", "/bin/"];
paths.iter().any(|p| std::path::Path::new(&format!("{}{}", p, cmd)).exists()) paths
.iter()
.any(|p| std::path::Path::new(&format!("{}{}", p, cmd)).exists())
}; };
cache.insert(cmd.to_string(), exists); cache.insert(cmd.to_string(), exists);

View file

@ -198,17 +198,16 @@ impl Lexer {
.ignore_then(text::digits(16)) .ignore_then(text::digits(16))
.map(|s: String| Token::Int(i64::from_str_radix(&s, 16).unwrap_or(0))); .map(|s: String| Token::Int(i64::from_str_radix(&s, 16).unwrap_or(0)));
let decimal = text::int(10) let decimal = text::int(10).map(|s: String| Token::Int(s.parse().unwrap()));
.map(|s: String| Token::Int(s.parse().unwrap()));
let int = octal.or(hex).or(decimal); let int = octal.or(hex).or(decimal);
let float = text::int(10) let float = text::int(10).then(just('.').then(text::digits(10))).map(
.then(just('.').then(text::digits(10))) |(a, (_, b)): (String, (char, String))| {
.map(|(a, (_, b)): (String, (char, String))| {
let f: f64 = format!("{}.{}", a, b).parse().unwrap(); let f: f64 = format!("{}.{}", a, b).parse().unwrap();
Token::Float(OrderedFloat(f)) Token::Float(OrderedFloat(f))
}); },
);
let escape = just('\\').ignore_then( let escape = just('\\').ignore_then(
just('\\') just('\\')
@ -226,14 +225,15 @@ impl Lexer {
.map(Token::Str); .map(Token::Str);
// Heredoc: >>>...<<< // Heredoc: >>>...<<<
let heredoc = just(">>>") let heredoc =
.ignore_then(take_until(just("<<<"))) just(">>>")
.map(|(chars, _): (Vec<char>, _)| { .ignore_then(take_until(just("<<<")))
let s: String = chars.into_iter().collect(); .map(|(chars, _): (Vec<char>, _)| {
// Trim leading newline if present let s: String = chars.into_iter().collect();
let s = s.strip_prefix('\n').unwrap_or(&s); // Trim leading newline if present
Token::Str(s.to_string()) let s = s.strip_prefix('\n').unwrap_or(&s);
}); Token::Str(s.to_string())
});
let keyword_or_ident = text::ident().map(|s: String| match s.as_str() { let keyword_or_ident = text::ident().map(|s: String| match s.as_str() {
"let" => Token::Let, "let" => Token::Let,
@ -307,9 +307,7 @@ impl Lexer {
just('#').to(Token::Hash), just('#').to(Token::Hash),
)); ));
let comment = just('#') let comment = just('#').then(none_of("\n").repeated()).ignored();
.then(none_of("\n").repeated())
.ignored();
let whitespace = just(' ').or(just('\t')).repeated().at_least(1).ignored(); let whitespace = just(' ').or(just('\t')).repeated().at_least(1).ignored();
@ -361,9 +359,14 @@ impl Lexer {
if current_indent > last_indent { if current_indent > last_indent {
indent_stack.push(current_indent); indent_stack.push(current_indent);
result.push(Spanned::new(Token::Indent(current_indent), span_start..span_start)); result.push(Spanned::new(
Token::Indent(current_indent),
span_start..span_start,
));
} else { } else {
while indent_stack.len() > 1 && current_indent < *indent_stack.last().unwrap() { while indent_stack.len() > 1
&& current_indent < *indent_stack.last().unwrap()
{
indent_stack.pop(); indent_stack.pop();
result.push(Spanned::new(Token::Dedent, span_start..span_start)); result.push(Spanned::new(Token::Dedent, span_start..span_start));
} }

View file

@ -17,6 +17,6 @@ pub use ast::*;
pub use evaluator::Evaluator; pub use evaluator::Evaluator;
pub use lexer::Lexer; pub use lexer::Lexer;
pub use parser::Parser; pub use parser::Parser;
pub use planner::{validate_dotfile_targets, DotfileConflict, DotfileValidation, DotfileWarning}; pub use planner::{DotfileConflict, DotfileValidation, DotfileWarning, validate_dotfile_targets};
pub use type_checker::TypeChecker; pub use type_checker::TypeChecker;
pub use types::Type; pub use types::Type;

View file

@ -33,17 +33,18 @@ impl MacroExpander {
let expanded: Vec<Spanned<Statement>> = decl let expanded: Vec<Spanned<Statement>> = decl
.body .body
.iter() .iter()
.map(|stmt| Spanned::new(self.substitute_statement(&stmt.node, &substitutions), stmt.span.clone())) .map(|stmt| {
Spanned::new(
self.substitute_statement(&stmt.node, &substitutions),
stmt.span.clone(),
)
})
.collect(); .collect();
Some(expanded) Some(expanded)
} }
fn substitute_statement( fn substitute_statement(&self, stmt: &Statement, subs: &HashMap<String, &Expr>) -> Statement {
&self,
stmt: &Statement,
subs: &HashMap<String, &Expr>,
) -> Statement {
match stmt { match stmt {
Statement::VarDecl(decl) => Statement::VarDecl(VarDecl { Statement::VarDecl(decl) => Statement::VarDecl(VarDecl {
name: decl.name.clone(), name: decl.name.clone(),
@ -63,7 +64,7 @@ impl MacroExpander {
copy_patterns: dotfile.copy_patterns.clone(), copy_patterns: dotfile.copy_patterns.clone(),
}), }),
Statement::Package(pkg) => Statement::Package(Package { Statement::Package(pkg) => Statement::Package(Box::new(Package {
default: pkg.default.as_ref().map(|e| self.substitute_expr(e, subs)), default: pkg.default.as_ref().map(|e| self.substitute_expr(e, subs)),
brew: pkg.brew.as_ref().map(|s| PackageSpec { brew: pkg.brew.as_ref().map(|s| PackageSpec {
name: self.substitute_expr(&s.name, subs), name: self.substitute_expr(&s.name, subs),
@ -86,7 +87,7 @@ impl MacroExpander {
tap: s.tap.clone(), tap: s.tap.clone(),
}), }),
when: pkg.when.as_ref().map(|e| self.substitute_expr(e, subs)), when: pkg.when.as_ref().map(|e| self.substitute_expr(e, subs)),
}), })),
Statement::ForLoop(for_loop) => Statement::ForLoop(ForLoop { Statement::ForLoop(for_loop) => Statement::ForLoop(ForLoop {
var: for_loop.var.clone(), var: for_loop.var.clone(),
@ -94,9 +95,7 @@ impl MacroExpander {
body: for_loop body: for_loop
.body .body
.iter() .iter()
.map(|s| { .map(|s| Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone()))
Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone())
})
.collect(), .collect(),
}), }),
@ -105,9 +104,7 @@ impl MacroExpander {
then_body: if_stmt then_body: if_stmt
.then_body .then_body
.iter() .iter()
.map(|s| { .map(|s| Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone()))
Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone())
})
.collect(), .collect(),
else_body: if_stmt.else_body.as_ref().map(|body| { else_body: if_stmt.else_body.as_ref().map(|body| {
body.iter() body.iter()
@ -164,9 +161,12 @@ impl MacroExpander {
Box::new(self.substitute_expr(idx, subs)), Box::new(self.substitute_expr(idx, subs)),
), ),
Expr::List(items) => { Expr::List(items) => Expr::List(
Expr::List(items.iter().map(|i| self.substitute_expr(i, subs)).collect()) items
} .iter()
.map(|i| self.substitute_expr(i, subs))
.collect(),
),
Expr::StructInit(name, fields) => Expr::StructInit( Expr::StructInit(name, fields) => Expr::StructInit(
name.clone(), name.clone(),
@ -184,23 +184,18 @@ impl MacroExpander {
.map(|e| Box::new(self.substitute_expr(e, subs))), .map(|e| Box::new(self.substitute_expr(e, subs))),
), ),
Expr::Lambda(params, body) => Expr::Lambda( Expr::Lambda(params, body) => {
params.clone(), Expr::Lambda(params.clone(), Box::new(self.substitute_expr(body, subs)))
Box::new(self.substitute_expr(body, subs)),
),
Expr::Await(inner) => {
Expr::Await(Box::new(self.substitute_expr(inner, subs)))
} }
Expr::Await(inner) => Expr::Await(Box::new(self.substitute_expr(inner, subs))),
Expr::Path(left, right) => Expr::Path( Expr::Path(left, right) => Expr::Path(
Box::new(self.substitute_expr(left, subs)), Box::new(self.substitute_expr(left, subs)),
Box::new(self.substitute_expr(right, subs)), Box::new(self.substitute_expr(right, subs)),
), ),
Expr::HomePath(path) => { Expr::HomePath(path) => Expr::HomePath(Box::new(self.substitute_expr(path, subs))),
Expr::HomePath(Box::new(self.substitute_expr(path, subs)))
}
Expr::Interpolated(parts) => Expr::Interpolated( Expr::Interpolated(parts) => Expr::Interpolated(
parts parts

View file

@ -2,8 +2,8 @@
use crate::ast::*; use crate::ast::*;
use crate::lexer::Token; use crate::lexer::Token;
use chumsky::prelude::*;
use chumsky::Parser as _; use chumsky::Parser as _;
use chumsky::prelude::*;
use std::collections::HashMap; use std::collections::HashMap;
/// Parses tokens into an AST. /// Parses tokens into an AST.
@ -33,10 +33,7 @@ impl Parser {
fn statement_parser() -> impl chumsky::Parser<Token, Spanned<Statement>, Error = Simple<Token>> fn statement_parser() -> impl chumsky::Parser<Token, Spanned<Statement>, Error = Simple<Token>>
{ {
recursive(|stmt| { recursive(|stmt| {
let whitespace = choice(( let whitespace = just(Token::Newline).repeated();
just(Token::Newline),
just(Token::Dedent),
)).repeated();
let var_decl = Self::var_decl_parser().map(Statement::VarDecl); let var_decl = Self::var_decl_parser().map(Statement::VarDecl);
let fn_decl = Self::fn_decl_parser(stmt.clone()).map(Statement::FnDecl); let fn_decl = Self::fn_decl_parser(stmt.clone()).map(Statement::FnDecl);
@ -45,7 +42,7 @@ impl Parser {
let type_alias = Self::type_alias_parser().map(Statement::TypeAlias); let type_alias = Self::type_alias_parser().map(Statement::TypeAlias);
let import = Self::import_parser().map(Statement::Import); let import = Self::import_parser().map(Statement::Import);
let dotfile = Self::dotfile_parser().map(Statement::Dotfile); let dotfile = Self::dotfile_parser().map(Statement::Dotfile);
let package = Self::package_parser().map(Statement::Package); let package = Self::package_parser().map(|p| Statement::Package(Box::new(p)));
let secret = Self::secret_parser().map(Statement::Secret); let secret = Self::secret_parser().map(Statement::Secret);
let hook = Self::hook_parser().map(Statement::Hook); let hook = Self::hook_parser().map(Statement::Hook);
let simple_hook = Self::simple_hook_parser().map(Statement::Hook); let simple_hook = Self::simple_hook_parser().map(Statement::Hook);
@ -127,11 +124,7 @@ impl Parser {
let param = Self::ident_parser() let param = Self::ident_parser()
.then_ignore(just(Token::Colon)) .then_ignore(just(Token::Colon))
.then(Self::type_annotation_parser()) .then(Self::type_annotation_parser())
.then( .then(just(Token::Eq).ignore_then(Self::expr_parser()).or_not())
just(Token::Eq)
.ignore_then(Self::expr_parser())
.or_not(),
)
.map(|((name, ty), default)| FnParam { name, ty, default }); .map(|((name, ty), default)| FnParam { name, ty, default });
param param
@ -146,11 +139,7 @@ impl Parser {
let field = Self::ident_parser() let field = Self::ident_parser()
.then_ignore(just(Token::Colon)) .then_ignore(just(Token::Colon))
.then(Self::type_annotation_parser()) .then(Self::type_annotation_parser())
.then( .then(just(Token::Eq).ignore_then(Self::expr_parser()).or_not())
just(Token::Eq)
.ignore_then(Self::expr_parser())
.or_not(),
)
.map(|((name, ty), default)| StructField { name, ty, default }); .map(|((name, ty), default)| StructField { name, ty, default });
let method = Self::fn_decl_parser(stmt); let method = Self::fn_decl_parser(stmt);
@ -161,12 +150,9 @@ impl Parser {
.then_ignore(just(Token::Newline).repeated()) .then_ignore(just(Token::Newline).repeated())
.then_ignore(just(Token::Indent(0)).rewind().or_not()) .then_ignore(just(Token::Indent(0)).rewind().or_not())
.then( .then(
choice(( choice((field.map(Either::Left), method.map(Either::Right)))
field.map(Either::Left), .padded_by(just(Token::Newline).repeated())
method.map(Either::Right), .repeated(),
))
.padded_by(just(Token::Newline).repeated())
.repeated(),
) )
.then_ignore(just(Token::Dedent).or_not()) .then_ignore(just(Token::Dedent).or_not())
.map(|(name, members)| { .map(|(name, members)| {
@ -178,7 +164,11 @@ impl Parser {
Either::Right(m) => methods.push(m), Either::Right(m) => methods.push(m),
} }
} }
StructDecl { name, fields, methods } StructDecl {
name,
fields,
methods,
}
}) })
} }
@ -339,10 +329,34 @@ impl Parser {
for (name, value) in fields { for (name, value) in fields {
match name.as_str() { match name.as_str() {
"default" => pkg.default = Some(value), "default" => pkg.default = Some(value),
"brew" => pkg.brew = Some(PackageSpec { name: value, cask: None, tap: None }), "brew" => {
"apt" => pkg.apt = Some(PackageSpec { name: value, cask: None, tap: None }), pkg.brew = Some(PackageSpec {
"pacman" => pkg.pacman = Some(PackageSpec { name: value, cask: None, tap: None }), name: value,
"yay" => pkg.yay = Some(PackageSpec { name: value, cask: None, tap: None }), cask: None,
tap: None,
})
}
"apt" => {
pkg.apt = Some(PackageSpec {
name: value,
cask: None,
tap: None,
})
}
"pacman" => {
pkg.pacman = Some(PackageSpec {
name: value,
cask: None,
tap: None,
})
}
"yay" => {
pkg.yay = Some(PackageSpec {
name: value,
cask: None,
tap: None,
})
}
"when" => pkg.when = Some(value), "when" => pkg.when = Some(value),
_ => {} _ => {}
} }
@ -563,8 +577,8 @@ impl Parser {
.then_ignore(just(Token::Dedent).or_not()) .then_ignore(just(Token::Dedent).or_not())
} }
fn type_annotation_parser( fn type_annotation_parser() -> impl chumsky::Parser<Token, TypeAnnotation, Error = Simple<Token>>
) -> impl chumsky::Parser<Token, TypeAnnotation, Error = Simple<Token>> { {
recursive(|ty| { recursive(|ty| {
let simple = Self::ident_parser().map(TypeAnnotation::Simple); let simple = Self::ident_parser().map(TypeAnnotation::Simple);
@ -590,11 +604,7 @@ impl Parser {
optional optional
.clone() .clone()
.then( .then(just(Token::Pipe).ignore_then(optional.clone()).repeated())
just(Token::Pipe)
.ignore_then(optional.clone())
.repeated(),
)
.map(|(first, rest)| { .map(|(first, rest)| {
if rest.is_empty() { if rest.is_empty() {
first first
@ -707,40 +717,44 @@ impl Parser {
paren, paren,
)); ));
let call_or_access = atom.then( let call_or_access = atom
choice(( .then(
expr.clone() choice((
.separated_by(just(Token::Comma)) expr.clone()
.allow_trailing() .separated_by(just(Token::Comma))
.delimited_by(just(Token::LParen), just(Token::RParen)) .allow_trailing()
.map(CallOrAccess::Call), .delimited_by(just(Token::LParen), just(Token::RParen))
just(Token::Dot) .map(CallOrAccess::Call),
.ignore_then(Self::ident_parser()) just(Token::Dot)
.then( .ignore_then(Self::ident_parser())
expr.clone() .then(
.separated_by(just(Token::Comma)) expr.clone()
.allow_trailing() .separated_by(just(Token::Comma))
.delimited_by(just(Token::LParen), just(Token::RParen)) .allow_trailing()
.or_not(), .delimited_by(just(Token::LParen), just(Token::RParen))
) .or_not(),
.map(|(name, args)| { )
if let Some(args) = args { .map(|(name, args)| {
CallOrAccess::MethodCall(name, args) if let Some(args) = args {
} else { CallOrAccess::MethodCall(name, args)
CallOrAccess::Field(name) } else {
} CallOrAccess::Field(name)
}), }
expr.clone() }),
.delimited_by(just(Token::LBracket), just(Token::RBracket)) expr.clone()
.map(CallOrAccess::Index), .delimited_by(just(Token::LBracket), just(Token::RBracket))
)) .map(CallOrAccess::Index),
.repeated(), ))
).foldl(|e, access| match access { .repeated(),
CallOrAccess::Call(args) => Expr::Call(Box::new(e), args), )
CallOrAccess::MethodCall(name, args) => Expr::MethodCall(Box::new(e), name, args), .foldl(|e, access| match access {
CallOrAccess::Field(name) => Expr::Field(Box::new(e), name), CallOrAccess::Call(args) => Expr::Call(Box::new(e), args),
CallOrAccess::Index(idx) => Expr::Index(Box::new(e), Box::new(idx)), CallOrAccess::MethodCall(name, args) => {
}); Expr::MethodCall(Box::new(e), name, args)
}
CallOrAccess::Field(name) => Expr::Field(Box::new(e), name),
CallOrAccess::Index(idx) => Expr::Index(Box::new(e), Box::new(idx)),
});
let unary_ops = choice(( let unary_ops = choice((
just(Token::Minus).to(UnaryOp::Neg), just(Token::Minus).to(UnaryOp::Neg),
@ -752,7 +766,9 @@ impl Parser {
let unary = unary_ops let unary = unary_ops
.then(call_or_access) .then(call_or_access)
.map(|(ops, expr)| { .map(|(ops, expr)| {
ops.into_iter().rev().fold(expr, |e, op| Expr::Unary(op, Box::new(e))) ops.into_iter()
.rev()
.fold(expr, |e, op| Expr::Unary(op, Box::new(e)))
}) })
.boxed(); .boxed();
@ -819,7 +835,11 @@ impl Parser {
or_expr or_expr
.clone() .clone()
.then(just(Token::QuestionQuestion).ignore_then(or_expr.clone()).repeated()) .then(
just(Token::QuestionQuestion)
.ignore_then(or_expr.clone())
.repeated(),
)
.foldl(|a, b| Expr::Binary(Box::new(a), BinOp::NullCoalesce, Box::new(b))) .foldl(|a, b| Expr::Binary(Box::new(a), BinOp::NullCoalesce, Box::new(b)))
}) })
} }
@ -867,11 +887,10 @@ impl Parser {
parts.push(InterpolatedPart::Literal(current)); parts.push(InterpolatedPart::Literal(current));
} }
if parts.len() == 1 { if parts.len() == 1
if let InterpolatedPart::Literal(s) = &parts[0] { && let InterpolatedPart::Literal(s) = &parts[0] {
return Expr::Literal(Literal::Str(s.clone())); return Expr::Literal(Literal::Str(s.clone()));
} }
}
Expr::Interpolated(parts) Expr::Interpolated(parts)
} }
@ -933,9 +952,7 @@ fn expr_to_permission_rules(expr: &Expr) -> Vec<PermissionRule> {
} }
} }
// Single mode in array (less common but supported) // Single mode in array (less common but supported)
Expr::Literal(Literal::Int(mode)) => { Expr::Literal(Literal::Int(mode)) => Some(PermissionRule::Single(*mode as u32)),
Some(PermissionRule::Single(*mode as u32))
}
_ => None, _ => None,
} }
}) })

View file

@ -32,11 +32,7 @@ pub trait TaskHandler: Send + Sync {
fn handle_package(&self, name: &str, manager: &str) -> TaskResult; fn handle_package(&self, name: &str, manager: &str) -> TaskResult;
/// Handles secret decryption. /// Handles secret decryption.
fn handle_secret( fn handle_secret(&self, source: &std::path::Path, target: &std::path::Path) -> TaskResult;
&self,
source: &std::path::Path,
target: &std::path::Path,
) -> TaskResult;
/// Handles hook execution. /// Handles hook execution.
fn handle_hook(&self, command: &str) -> TaskResult; fn handle_hook(&self, command: &str) -> TaskResult;
@ -90,13 +86,13 @@ impl<H: TaskHandler + 'static> Executor<H> {
/// Executes tasks in parallel batches. /// Executes tasks in parallel batches.
pub fn execute_parallel(&self) -> Result<ExecutionReport, ExecutionError> { pub fn execute_parallel(&self) -> Result<ExecutionReport, ExecutionError> {
let batches = self let batches =
.graph self.graph
.get_parallel_batches() .get_parallel_batches()
.map_err(|e| ExecutionError::TaskFailed { .map_err(|e| ExecutionError::TaskFailed {
task_id: "scheduler".to_string(), task_id: "scheduler".to_string(),
message: e, message: e,
})?; })?;
let report = Arc::new(Mutex::new(ExecutionReport::new())); let report = Arc::new(Mutex::new(ExecutionReport::new()));
let errors = Arc::new(Mutex::new(Vec::new())); let errors = Arc::new(Mutex::new(Vec::new()));
@ -141,13 +137,9 @@ impl<H: TaskHandler + 'static> Executor<H> {
template, template,
} => self.handler.handle_dotfile(source, target, *template), } => self.handler.handle_dotfile(source, target, *template),
TaskData::Package { name, manager } => { TaskData::Package { name, manager } => self.handler.handle_package(name, manager),
self.handler.handle_package(name, manager)
}
TaskData::Secret { source, target } => { TaskData::Secret { source, target } => self.handler.handle_secret(source, target),
self.handler.handle_secret(source, target)
}
TaskData::Hook { command } => self.handler.handle_hook(command), TaskData::Hook { command } => self.handler.handle_hook(command),

View file

@ -7,5 +7,5 @@ pub mod scheduler;
pub use dag::DependencyGraph; pub use dag::DependencyGraph;
pub use executor::Executor; pub use executor::Executor;
pub use scheduler::{ pub use scheduler::{
validate_dotfile_targets, DotfileConflict, DotfileValidation, DotfileWarning, Scheduler, DotfileConflict, DotfileValidation, DotfileWarning, Scheduler, validate_dotfile_targets,
}; };

View file

@ -99,10 +99,7 @@ impl Default for Scheduler {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum DotfileConflict { pub enum DotfileConflict {
/// Same source and target (duplicate entry). /// Same source and target (duplicate entry).
Duplicate { Duplicate { index_a: usize, index_b: usize },
index_a: usize,
index_b: usize,
},
/// Overlapping directories with no distinguishing settings (likely redundant). /// Overlapping directories with no distinguishing settings (likely redundant).
RedundantOverlap { RedundantOverlap {
parent_index: usize, parent_index: usize,
@ -323,7 +320,8 @@ mod tests {
std::fs::create_dir_all(temp.path().join("config/nvim")).unwrap(); std::fs::create_dir_all(temp.path().join("config/nvim")).unwrap();
std::fs::write(temp.path().join("config/nvim/init.lua"), "").unwrap(); std::fs::write(temp.path().join("config/nvim/init.lua"), "").unwrap();
let mut file_dotfile = make_dotfile("config/nvim/init.lua", "/home/user/.config/nvim/init.lua"); let mut file_dotfile =
make_dotfile("config/nvim/init.lua", "/home/user/.config/nvim/init.lua");
file_dotfile.template = true; file_dotfile.template = true;
let dotfiles = vec![ let dotfiles = vec![
@ -377,6 +375,10 @@ mod tests {
assert!(result.errors.is_empty()); assert!(result.errors.is_empty());
assert_eq!(result.warnings.len(), 1); assert_eq!(result.warnings.len(), 1);
assert!(result.warnings[0].message.contains("overlapping directories")); assert!(
result.warnings[0]
.message
.contains("overlapping directories")
);
} }
} }

View file

@ -50,18 +50,26 @@ impl TypeError {
TypeError::UndefinedType(name, span) => { TypeError::UndefinedType(name, span) => {
(format!("undefined type: {}", name), span.clone()) (format!("undefined type: {}", name), span.clone())
} }
TypeError::TypeMismatch { expected, got, span } => { TypeError::TypeMismatch {
(format!("expected {}, got {}", expected, got), span.clone()) expected,
} got,
TypeError::NotCallable(ty, span) => { span,
(format!("cannot call non-function type: {}", ty), span.clone()) } => (format!("expected {}, got {}", expected, got), span.clone()),
} TypeError::NotCallable(ty, span) => (
format!("cannot call non-function type: {}", ty),
span.clone(),
),
TypeError::FieldNotFound { ty, field, span } => { TypeError::FieldNotFound { ty, field, span } => {
(format!("field {} not found on {}", field, ty), span.clone()) (format!("field {} not found on {}", field, ty), span.clone())
} }
TypeError::WrongArity { expected, got, span } => { TypeError::WrongArity {
(format!("expected {} arguments, got {}", expected, got), span.clone()) expected,
} got,
span,
} => (
format!("expected {} arguments, got {}", expected, got),
span.clone(),
),
}; };
Report::build(ReportKind::Error, filename, span.start) Report::build(ReportKind::Error, filename, span.start)
@ -318,7 +326,8 @@ impl TypeChecker {
Box::new(ft.return_type.clone()), Box::new(ft.return_type.clone()),
) )
} else { } else {
self.errors.push(TypeError::UndefinedVariable(name.clone(), span.clone())); self.errors
.push(TypeError::UndefinedVariable(name.clone(), span.clone()));
Type::Unknown Type::Unknown
} }
} }
@ -336,7 +345,8 @@ impl TypeChecker {
Type::Int Type::Int
} }
} else if matches!(op, BinOp::Add) } else if matches!(op, BinOp::Add)
&& (left_ty.is_compatible(&Type::Str) || right_ty.is_compatible(&Type::Str)) && (left_ty.is_compatible(&Type::Str)
|| right_ty.is_compatible(&Type::Str))
{ {
Type::Str Type::Str
} else { } else {
@ -349,9 +359,12 @@ impl TypeChecker {
} }
} }
BinOp::Eq | BinOp::NotEq | BinOp::Lt | BinOp::Gt | BinOp::LtEq | BinOp::GtEq => { BinOp::Eq
Type::Bool | BinOp::NotEq
} | BinOp::Lt
| BinOp::Gt
| BinOp::LtEq
| BinOp::GtEq => Type::Bool,
BinOp::And | BinOp::Or => { BinOp::And | BinOp::Or => {
if !left_ty.is_compatible(&Type::Bool) { if !left_ty.is_compatible(&Type::Bool) {
@ -446,7 +459,8 @@ impl TypeChecker {
} }
Type::Unknown | Type::Any => Type::Any, Type::Unknown | Type::Any => Type::Any,
_ => { _ => {
self.errors.push(TypeError::NotCallable(callee_ty.display(), span.clone())); self.errors
.push(TypeError::NotCallable(callee_ty.display(), span.clone()));
Type::Unknown Type::Unknown
} }
} }
@ -545,7 +559,8 @@ impl TypeChecker {
if let Some(et) = self.env.enums.get(enum_name) { if let Some(et) = self.env.enums.get(enum_name) {
Type::Enum(et.clone()) Type::Enum(et.clone())
} else { } else {
self.errors.push(TypeError::UndefinedType(enum_name.clone(), span.clone())); self.errors
.push(TypeError::UndefinedType(enum_name.clone(), span.clone()));
Type::Unknown Type::Unknown
} }
} }
@ -572,7 +587,8 @@ impl TypeChecker {
} }
Type::Struct(st) Type::Struct(st)
} else { } else {
self.errors.push(TypeError::UndefinedType(struct_name.clone(), span.clone())); self.errors
.push(TypeError::UndefinedType(struct_name.clone(), span.clone()));
Type::Unknown Type::Unknown
} }
} }
@ -615,8 +631,8 @@ impl TypeChecker {
} }
Expr::Await(expr) => { Expr::Await(expr) => {
let ty = self.infer_expr(expr, span);
ty self.infer_expr(expr, span)
} }
Expr::Path(left, right) => { Expr::Path(left, right) => {
@ -646,7 +662,7 @@ impl TypeChecker {
) -> Type { ) -> Type {
match name { match name {
"map" | "filter" => { "map" | "filter" => {
if args.len() >= 1 { if !args.is_empty() {
let list_ty = self.infer_expr(&args[0], span); let list_ty = self.infer_expr(&args[0], span);
if let Type::List(inner) = list_ty { if let Type::List(inner) = list_ty {
if name == "filter" { if name == "filter" {
@ -660,7 +676,7 @@ impl TypeChecker {
"fold" => Type::Any, "fold" => Type::Any,
"len" => Type::Int, "len" => Type::Int,
"first" | "last" => { "first" | "last" => {
if args.len() >= 1 { if !args.is_empty() {
let list_ty = self.infer_expr(&args[0], span); let list_ty = self.infer_expr(&args[0], span);
if let Type::List(inner) = list_ty { if let Type::List(inner) = list_ty {
return Type::Optional(inner); return Type::Optional(inner);
@ -675,8 +691,11 @@ impl TypeChecker {
"read_file" | "read_file_lines" => Type::Str, "read_file" | "read_file_lines" => Type::Str,
"file_exists" | "dir_exists" | "is_symlink" => Type::Bool, "file_exists" | "dir_exists" | "is_symlink" => Type::Bool,
"list_dir" | "glob" | "walk_dir" => Type::List(Box::new(Type::Path)), "list_dir" | "glob" | "walk_dir" => Type::List(Box::new(Type::Path)),
"home" | "config_dir" | "config_path" | "data_dir" | "cache_dir" | "temp_dir" | "temp_file" => Type::Path, "home" | "config_dir" | "config_path" | "data_dir" | "cache_dir" | "temp_dir"
"path_join" | "path_parent" | "path_filename" | "path_extension" | "read_link" => Type::Path, | "temp_file" => Type::Path,
"path_join" | "path_parent" | "path_filename" | "path_extension" | "read_link" => {
Type::Path
}
"fetch" | "fetch_json" | "fetch_bytes" | "post" | "post_json" => Type::Any, "fetch" | "fetch_json" | "fetch_bytes" | "post" | "post_json" => Type::Any,
"download" => Type::Bool, "download" => Type::Bool,
"exec" | "shell" => Type::Str, "exec" | "shell" => Type::Str,
@ -688,7 +707,7 @@ impl TypeChecker {
"encrypt_age" | "decrypt_age" => Type::Str, "encrypt_age" | "decrypt_age" => Type::Str,
"env" => Type::Optional(Box::new(Type::Str)), "env" => Type::Optional(Box::new(Type::Str)),
"unwrap" => { "unwrap" => {
if args.len() >= 1 { if !args.is_empty() {
let opt_ty = self.infer_expr(&args[0], span); let opt_ty = self.infer_expr(&args[0], span);
if let Type::Optional(inner) = opt_ty { if let Type::Optional(inner) = opt_ty {
return *inner; return *inner;
@ -706,14 +725,14 @@ impl TypeChecker {
"is_some" | "is_none" => Type::Bool, "is_some" | "is_none" => Type::Bool,
"all" | "race" => Type::Any, "all" | "race" => Type::Any,
"seq" | "batch" => { "seq" | "batch" => {
if args.len() >= 1 { if !args.is_empty() {
self.infer_expr(&args[0], span) self.infer_expr(&args[0], span)
} else { } else {
Type::Any Type::Any
} }
} }
"flatten" | "concat" | "unique" | "sort" | "reverse" => { "flatten" | "concat" | "unique" | "sort" | "reverse" => {
if args.len() >= 1 { if !args.is_empty() {
self.infer_expr(&args[0], span) self.infer_expr(&args[0], span)
} else { } else {
Type::List(Box::new(Type::Any)) Type::List(Box::new(Type::Any))
@ -721,7 +740,7 @@ impl TypeChecker {
} }
"zip" | "enumerate" => Type::List(Box::new(Type::Any)), "zip" | "enumerate" => Type::List(Box::new(Type::Any)),
"sort_by" => { "sort_by" => {
if args.len() >= 1 { if !args.is_empty() {
self.infer_expr(&args[0], span) self.infer_expr(&args[0], span)
} else { } else {
Type::List(Box::new(Type::Any)) Type::List(Box::new(Type::Any))

View file

@ -47,7 +47,10 @@ impl Type {
(a, Type::Optional(b)) => a.is_compatible(b), (a, Type::Optional(b)) => a.is_compatible(b),
(Type::Function(a_params, a_ret), Type::Function(b_params, b_ret)) => { (Type::Function(a_params, a_ret), Type::Function(b_params, b_ret)) => {
a_params.len() == b_params.len() a_params.len() == b_params.len()
&& a_params.iter().zip(b_params.iter()).all(|(a, b)| a.is_compatible(b)) && a_params
.iter()
.zip(b_params.iter())
.all(|(a, b)| a.is_compatible(b))
&& a_ret.is_compatible(b_ret) && a_ret.is_compatible(b_ret)
} }
(Type::Struct(a), Type::Struct(b)) => a.name == b.name, (Type::Struct(a), Type::Struct(b)) => a.name == b.name,
@ -71,12 +74,20 @@ impl Type {
Type::List(inner) => format!("[{}]", inner.display()), Type::List(inner) => format!("[{}]", inner.display()),
Type::Optional(inner) => format!("{}?", inner.display()), Type::Optional(inner) => format!("{}?", inner.display()),
Type::Function(params, ret) => { Type::Function(params, ret) => {
let params_str = params.iter().map(|p| p.display()).collect::<Vec<_>>().join(", "); let params_str = params
.iter()
.map(|p| p.display())
.collect::<Vec<_>>()
.join(", ");
format!("fn({}) -> {}", params_str, ret.display()) format!("fn({}) -> {}", params_str, ret.display())
} }
Type::Struct(s) => s.name.clone(), Type::Struct(s) => s.name.clone(),
Type::Enum(e) => e.name.clone(), Type::Enum(e) => e.name.clone(),
Type::Union(types) => types.iter().map(|t| t.display()).collect::<Vec<_>>().join(" | "), Type::Union(types) => types
.iter()
.map(|t| t.display())
.collect::<Vec<_>>()
.join(" | "),
Type::Any => "any".to_string(), Type::Any => "any".to_string(),
Type::Unknown => "unknown".to_string(), Type::Unknown => "unknown".to_string(),
} }