fix(parser): bug in indentation

This commit is contained in:
Ray Sinurat 2026-02-05 22:35:09 -06:00
parent c53b4db9cf
commit ca86eaae6e
45 changed files with 1555 additions and 603 deletions

35
Cargo.lock generated
View file

@ -1046,6 +1046,7 @@ dependencies = [
"glob",
"hostname",
"indicatif",
"minijinja",
"os_info",
"regex-lite",
"serde",
@ -1055,6 +1056,7 @@ dependencies = [
"thiserror 2.0.18",
"toml 0.8.23",
"walkdir",
"which",
]
[[package]]
@ -1106,6 +1108,12 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "env_home"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe"
[[package]]
name = "equivalent"
version = "1.0.2"
@ -2030,6 +2038,15 @@ dependencies = [
"unicase",
]
[[package]]
name = "minijinja"
version = "2.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b479616bb6f0779fb0f3964246beda02d4b01144e1b0d5519616e012ccc2a245"
dependencies = [
"serde",
]
[[package]]
name = "minimal-lexical"
version = "0.2.1"
@ -3716,6 +3733,18 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "which"
version = "7.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762"
dependencies = [
"either",
"env_home",
"rustix 1.1.3",
"winsafe",
]
[[package]]
name = "winapi"
version = "0.3.9"
@ -3918,6 +3947,12 @@ dependencies = [
"memchr",
]
[[package]]
name = "winsafe"
version = "0.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
[[package]]
name = "wit-bindgen"
version = "0.51.0"

View file

@ -3,7 +3,7 @@ use doot_core::state::{StateStore, SyncStatus};
use doot_core::{Config, Deployer};
use doot_lang::ast::HookStage;
use doot_lang::evaluator::{DotfileConfig, HookConfig};
use doot_lang::{validate_dotfile_targets, DotfileConflict, Evaluator};
use doot_lang::{DotfileConflict, Evaluator, validate_dotfile_targets};
use indicatif::{ProgressBar, ProgressStyle};
use std::io::{self, Write};
use std::path::PathBuf;
@ -112,7 +112,11 @@ pub fn run(
for dotfile in &ordered_dotfiles {
let full_source = source_dir.join(&dotfile.source);
let status = state.check_sync_status(&full_source, &dotfile.target);
let status = state.check_sync_status_with_template(
&full_source,
&dotfile.target,
Some(dotfile.template),
);
// For directories, check individual files for smarter merging
if full_source.is_dir() {
@ -124,7 +128,9 @@ pub fn run(
for (src, tgt, file_status) in changed_files {
match file_status {
SyncStatus::Synced => {}
SyncStatus::NotDeployed | SyncStatus::TargetMissing | SyncStatus::SourceChanged => {
SyncStatus::NotDeployed
| SyncStatus::TargetMissing
| SyncStatus::SourceChanged => {
// Can auto-merge: just copy from source
has_changes = true;
if verbose {
@ -185,10 +191,7 @@ pub fn run(
conflicts.push((dotfile, status));
}
SyncStatus::SourceMissing => {
eprintln!(
" [error] source missing: {}",
dotfile.source.display()
);
eprintln!(" [error] source missing: {}", dotfile.source.display());
}
}
}
@ -255,7 +258,9 @@ pub fn run(
dotfile.source.display(),
dotfile.target.display()
);
println!(" [s] Use source [t] Keep target [d] Show diff [m] Merge in editor");
println!(
" [s] Use source [t] Keep target [d] Show diff [m] Merge in editor"
);
print!(" Choice [s/t/d/m]: ");
io::stdout().flush()?;
@ -307,7 +312,11 @@ pub fn run(
} else {
println!("\n[dry-run] would deploy:");
for dotfile in &to_deploy {
println!(" {} -> {}", dotfile.source.display(), dotfile.target.display());
println!(
" {} -> {}",
dotfile.source.display(),
dotfile.target.display()
);
}
}
@ -387,11 +396,7 @@ pub fn run(
}
for skipped in &deploy_result.skipped {
println!(
" [skip] {} ({})",
skipped.target.display(),
skipped.reason
);
println!(" [skip] {} ({})", skipped.target.display(), skipped.reason);
}
for error in &deploy_result.errors {
@ -433,7 +438,10 @@ pub fn run(
}
if to_install.is_empty() {
println!("\nall {} packages already installed", already_installed.len());
println!(
"\nall {} packages already installed",
already_installed.len()
);
} else {
println!("\ninstalling {} packages...", to_install.len());
manager.install(&to_install)?;
@ -562,9 +570,7 @@ fn merge_in_editor(source: &PathBuf, target: &PathBuf) -> anyhow::Result<bool> {
println!(" Opening {} with target content...", editor);
println!(" Reference source: {}", source.display());
let status = Command::new(&editor)
.arg(&merged_path)
.status()?;
let status = Command::new(&editor).arg(&merged_path).status()?;
if !status.success() {
let _ = std::fs::remove_file(&merged_path);

View file

@ -1,4 +1,4 @@
use doot_core::{encryption::AgeEncryption, Config};
use doot_core::{Config, encryption::AgeEncryption};
use std::path::PathBuf;
pub fn run(file: PathBuf, identity: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
@ -8,7 +8,9 @@ pub fn run(file: PathBuf, identity: Option<PathBuf>, verbose: bool) -> anyhow::R
} else if let Ok(key) = std::env::var("DOOT_AGE_IDENTITY") {
key
} else if config.identity_file.exists() {
std::fs::read_to_string(&config.identity_file)?.trim().to_string()
std::fs::read_to_string(&config.identity_file)?
.trim()
.to_string()
} else {
anyhow::bail!(
"no identity specified. use --identity, DOOT_AGE_IDENTITY env var, or {}",

View file

@ -1,12 +1,12 @@
use super::{find_config_file, parse_config, type_check};
use doot_core::{
deploy::Linker,
state::{DeployMode, StateStore},
Config,
deploy::{Linker, TemplateEngine},
state::{DeployMode, StateStore},
};
use doot_lang::Evaluator;
use std::io::{self, Write};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::process::Command;
pub fn run(
@ -67,7 +67,7 @@ pub fn run(
if should_apply {
if let Some(df) = dotfile {
apply_single(&source_file, &df.target, &df, &config, verbose)?;
apply_single(&source_file, &df.target, df, &config, verbose)?;
println!("applied changes to {}", df.target.display());
} else {
println!("hint: run 'doot apply' to deploy changes");
@ -109,6 +109,28 @@ fn apply_single(
let mut state = StateStore::new(&config.state_file);
// Handle templates specially
if dotfile.template {
if let Some(parent) = target.parent() {
std::fs::create_dir_all(parent)?;
}
let content = std::fs::read_to_string(source)?;
let engine = TemplateEngine::new();
let rendered = engine
.render(&content)
.map_err(|e| anyhow::anyhow!("template error: {}", e))?;
std::fs::write(target, rendered)?;
if verbose {
println!("rendered {} -> {}", source.display(), target.display());
}
state.record_deployment_with_template(source, target, DeployMode::Copy, true);
state.save()?;
return Ok(());
}
match deploy_mode {
DeployMode::Link => {
let linker = Linker::new(config.clone());
@ -147,7 +169,7 @@ fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> std::io::Result<()> {
let dst_path = dst.join(entry.file_name());
if ty.is_dir() {
copy_dir_recursive(&src_path.into(), &dst_path)?;
copy_dir_recursive(&src_path, &dst_path)?;
} else {
std::fs::copy(&src_path, &dst_path)?;
}
@ -156,18 +178,17 @@ fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> std::io::Result<()> {
}
fn expand_tilde(path: &str) -> PathBuf {
if path.starts_with("~/") {
if let Some(home) = dirs::home_dir() {
if path.starts_with("~/")
&& let Some(home) = dirs::home_dir() {
return home.join(&path[2..]);
}
}
PathBuf::from(path)
}
fn find_source_and_dotfile<'a>(
target: &PathBuf,
dotfiles: &'a [doot_lang::evaluator::DotfileConfig],
source_dir: &PathBuf,
source_dir: &Path,
state: &StateStore,
) -> anyhow::Result<(PathBuf, Option<&'a doot_lang::evaluator::DotfileConfig>)> {
// Exact match with dotfile targets

View file

@ -1,4 +1,4 @@
use doot_core::{encryption::AgeEncryption, Config};
use doot_core::{Config, encryption::AgeEncryption};
use std::path::PathBuf;
pub fn run(file: PathBuf, recipient: Option<String>, verbose: bool) -> anyhow::Result<()> {
@ -20,7 +20,11 @@ pub fn run(file: PathBuf, recipient: Option<String>, verbose: bool) -> anyhow::R
};
if verbose {
println!("encrypting {} with recipient {}", file.display(), &recipient_key[..20]);
println!(
"encrypting {} with recipient {}",
file.display(),
&recipient_key[..20]
);
}
let mut encryption = AgeEncryption::new();

View file

@ -14,14 +14,12 @@ pub fn run(config_path: Option<PathBuf>, check: bool, _verbose: bool) -> anyhow:
} else {
println!("{} is formatted correctly", path.display());
}
} else {
if formatted != source {
} else if formatted != source {
std::fs::write(&path, &formatted)?;
println!("formatted {}", path.display());
} else {
println!("{} is already formatted", path.display());
}
}
Ok(())
}

View file

@ -1,5 +1,5 @@
use doot_core::Config;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
pub fn run(path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
let source_dir = path.unwrap_or_else(Config::default_source_dir);
@ -49,7 +49,10 @@ pub fn run(path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
println!(" state: {}", config.state_dir.display());
println!();
println!("next steps:");
println!(" 1. add dotfiles to {}/config/", config.config_dir.display());
println!(
" 1. add dotfiles to {}/config/",
config.config_dir.display()
);
println!(" 2. edit {}/doot.doot", config.config_dir.display());
println!(" 3. run 'doot apply -n' to preview");
println!(" 4. run 'doot apply' to deploy");
@ -57,7 +60,7 @@ pub fn run(path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
Ok(())
}
fn example_config_with_source(source_dir: &PathBuf) -> String {
fn example_config_with_source(source_dir: &Path) -> String {
format!(
r#"# doot.doot
# source directory: {source_dir}

View file

@ -25,10 +25,7 @@ pub fn find_config_file(base: Option<PathBuf>) -> anyhow::Result<PathBuf> {
anyhow::bail!("config file not found: {}", path.display());
}
let candidates = vec![
PathBuf::from("doot.doot"),
Config::default_config_file(),
];
let candidates = vec![PathBuf::from("doot.doot"), Config::default_config_file()];
for candidate in candidates {
if candidate.exists() {

View file

@ -27,14 +27,12 @@ pub fn install(config_path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()
let package_names: Vec<String> = result
.packages
.iter()
.filter_map(|p| {
match manager.name() {
.filter_map(|p| match manager.name() {
"brew" => p.brew.clone().or_else(|| p.default.clone()),
"apt" => p.apt.clone().or_else(|| p.default.clone()),
"pacman" => p.pacman.clone().or_else(|| p.default.clone()),
"yay" => p.yay.clone().or_else(|| p.default.clone()),
_ => p.default.clone(),
}
})
.collect();

View file

@ -1,6 +1,6 @@
use doot_core::{
state::{DeployMode, Snapshot},
Config,
state::{DeployMode, Snapshot},
};
use std::path::PathBuf;

View file

@ -1,6 +1,6 @@
use doot_core::{
state::{Snapshot, StateStore},
Config,
state::{Snapshot, StateStore},
};
use std::path::PathBuf;
@ -16,7 +16,8 @@ pub fn run(_config_path: Option<PathBuf>, name: String, verbose: bool) -> anyhow
println!(" snapshot dir: {}", config.snapshot_dir.display());
}
let state_content = std::fs::read_to_string(&config.state_file).unwrap_or_else(|_| "{}".to_string());
let state_content =
std::fs::read_to_string(&config.state_file).unwrap_or_else(|_| "{}".to_string());
let state_data: doot_core::state::store::State = serde_json::from_str(&state_content)?;
Snapshot::create(&name, &state_data, &config.snapshot_dir)?;

View file

@ -2,19 +2,19 @@ use super::{find_config_file, parse_config, type_check};
use crossterm::{
event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyEventKind},
execute,
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
terminal::{EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode},
};
use doot_core::config::Config;
use doot_core::deploy::Linker;
use doot_core::state::{DeployMode, StateStore};
use doot_lang::Evaluator;
use ratatui::{
Frame, Terminal,
backend::CrosstermBackend,
layout::{Constraint, Direction, Layout},
style::{Color, Modifier, Style},
text::{Line, Span},
widgets::{Block, Borders, List, ListItem, ListState, Paragraph, Tabs, Gauge},
Frame, Terminal,
widgets::{Block, Borders, Gauge, List, ListItem, ListState, Paragraph, Tabs},
};
use std::io;
use std::path::PathBuf;
@ -234,14 +234,22 @@ impl App {
Tab::Dotfiles => {
let len = self.dotfiles.len();
if len > 0 {
let i = self.dotfile_state.selected().map(|i| (i + 1) % len).unwrap_or(0);
let i = self
.dotfile_state
.selected()
.map(|i| (i + 1) % len)
.unwrap_or(0);
self.dotfile_state.select(Some(i));
}
}
Tab::Packages => {
let len = self.packages.len();
if len > 0 {
let i = self.package_state.selected().map(|i| (i + 1) % len).unwrap_or(0);
let i = self
.package_state
.selected()
.map(|i| (i + 1) % len)
.unwrap_or(0);
self.package_state.select(Some(i));
}
}
@ -257,14 +265,22 @@ impl App {
Tab::Dotfiles => {
let len = self.dotfiles.len();
if len > 0 {
let i = self.dotfile_state.selected().map(|i| if i == 0 { len - 1 } else { i - 1 }).unwrap_or(0);
let i = self
.dotfile_state
.selected()
.map(|i| if i == 0 { len - 1 } else { i - 1 })
.unwrap_or(0);
self.dotfile_state.select(Some(i));
}
}
Tab::Packages => {
let len = self.packages.len();
if len > 0 {
let i = self.package_state.selected().map(|i| if i == 0 { len - 1 } else { i - 1 }).unwrap_or(0);
let i = self
.package_state
.selected()
.map(|i| if i == 0 { len - 1 } else { i - 1 })
.unwrap_or(0);
self.package_state.select(Some(i));
}
}
@ -278,21 +294,18 @@ impl App {
}
match self.tab {
Tab::Dotfiles => {
if let Some(i) = self.dotfile_state.selected() {
if let Some(item) = self.dotfiles.get_mut(i) {
if item.status != FileStatus::Error {
if let Some(i) = self.dotfile_state.selected()
&& let Some(item) = self.dotfiles.get_mut(i)
&& item.status != FileStatus::Error {
item.selected = !item.selected;
}
}
}
}
Tab::Packages => {
if let Some(i) = self.package_state.selected() {
if let Some(item) = self.packages.get_mut(i) {
if let Some(i) = self.package_state.selected()
&& let Some(item) = self.packages.get_mut(i) {
item.selected = !item.selected;
}
}
}
_ => {}
}
}
@ -347,20 +360,25 @@ impl App {
self.apply_logs.clear();
self.log_scroll = 0;
let selected_dotfiles: Vec<_> = self.dotfiles.iter()
let selected_dotfiles: Vec<_> = self
.dotfiles
.iter()
.enumerate()
.filter(|(_, d)| d.selected && d.status != FileStatus::Error)
.map(|(i, _)| i)
.collect();
let selected_packages: Vec<_> = self.packages.iter()
let selected_packages: Vec<_> = self
.packages
.iter()
.enumerate()
.filter(|(_, p)| p.selected && !p.installed)
.map(|(i, _)| i)
.collect();
if selected_dotfiles.is_empty() && selected_packages.is_empty() {
self.apply_logs.push(("Nothing to apply".to_string(), LogLevel::Info));
self.apply_logs
.push(("Nothing to apply".to_string(), LogLevel::Info));
self.apply_state = ApplyState::Done;
return;
}
@ -386,23 +404,26 @@ impl App {
let has_packages = self.packages.iter().any(|p| p.selected && !p.installed);
let has_owner = self.dotfiles.iter().any(|d| d.selected);
if has_packages {
if let Some(manager) = doot_core::package::detect_package_manager() {
if has_packages
&& let Some(manager) = doot_core::package::detect_package_manager() {
return manager.needs_sudo();
}
}
has_owner
}
fn apply_with_sudo(&mut self) {
let selected_dotfiles: Vec<_> = self.dotfiles.iter()
let selected_dotfiles: Vec<_> = self
.dotfiles
.iter()
.enumerate()
.filter(|(_, d)| d.selected && d.status != FileStatus::Error)
.map(|(i, _)| i)
.collect();
let selected_packages: Vec<_> = self.packages.iter()
let selected_packages: Vec<_> = self
.packages
.iter()
.enumerate()
.filter(|(_, p)| p.selected && !p.installed)
.map(|(i, _)| i)
@ -427,12 +448,20 @@ impl App {
};
self.apply_logs.push((
format!("{} {} -> {}", action_name, dotfile.source.display(), target.display()),
format!(
"{} {} -> {}",
action_name,
dotfile.source.display(),
target.display()
),
LogLevel::Info,
));
let result: Result<(), String> = match dotfile.deploy_mode {
DeployMode::Link => linker.link(&full_source, target).map(|_| ()).map_err(|e| e.to_string()),
DeployMode::Link => linker
.link(&full_source, target)
.map(|_| ())
.map_err(|e| e.to_string()),
DeployMode::Copy => copy_file(&full_source, target),
};
@ -448,10 +477,8 @@ impl App {
self.dotfiles[idx].selected = false;
}
Err(e) => {
self.apply_logs.push((
format!(" ✗ Failed: {}", e),
LogLevel::Error,
));
self.apply_logs
.push((format!(" ✗ Failed: {}", e), LogLevel::Error));
self.dotfiles[idx].status = FileStatus::Error;
}
}
@ -471,37 +498,31 @@ impl App {
let result = if manager.needs_sudo() {
if let Some(ref password) = self.sudo_password {
manager.install_with_sudo(&[package.name.clone()], password)
manager.install_with_sudo(std::slice::from_ref(&package.name), password)
} else {
manager.install(&[package.name.clone()])
manager.install(std::slice::from_ref(&package.name))
}
} else {
manager.install(&[package.name.clone()])
manager.install(std::slice::from_ref(&package.name))
};
match result {
Ok(_) => {
self.apply_logs.push((
format!(" ✓ Installed {}", package.name),
LogLevel::Success,
));
self.apply_logs
.push((format!(" ✓ Installed {}", package.name), LogLevel::Success));
self.packages[idx].installed = true;
self.packages[idx].selected = false;
}
Err(e) => {
self.apply_logs.push((
format!(" ✗ Failed: {}", e),
LogLevel::Error,
));
self.apply_logs
.push((format!(" ✗ Failed: {}", e), LogLevel::Error));
}
}
self.apply_progress += 1;
}
} else {
self.apply_logs.push((
"No package manager available".to_string(),
LogLevel::Error,
));
self.apply_logs
.push(("No package manager available".to_string(), LogLevel::Error));
}
self.apply_state = ApplyState::Done;
@ -529,8 +550,8 @@ fn run_app(
loop {
terminal.draw(|f| ui(f, &mut app))?;
if let Event::Key(key) = event::read()? {
if key.kind == KeyEventKind::Press {
if let Event::Key(key) = event::read()?
&& key.kind == KeyEventKind::Press {
match app.input_mode {
InputMode::Password => match key.code {
KeyCode::Enter => {
@ -583,7 +604,9 @@ fn run_app(
_ => {}
},
ApplyState::Done => match key.code {
KeyCode::Enter | KeyCode::Esc | KeyCode::Char('q') => app.dismiss_apply(),
KeyCode::Enter | KeyCode::Esc | KeyCode::Char('q') => {
app.dismiss_apply()
}
KeyCode::Up | KeyCode::Char('k') => app.scroll_log_up(),
KeyCode::Down | KeyCode::Char('j') => app.scroll_log_down(),
_ => {}
@ -592,7 +615,6 @@ fn run_app(
}
}
}
}
}
fn ui(f: &mut Frame, app: &mut App) {
@ -615,7 +637,11 @@ fn ui(f: &mut Frame, app: &mut App) {
Tab::Status => 3,
})
.style(Style::default().fg(Color::Cyan))
.highlight_style(Style::default().fg(Color::Yellow).add_modifier(Modifier::BOLD));
.highlight_style(
Style::default()
.fg(Color::Yellow)
.add_modifier(Modifier::BOLD),
);
f.render_widget(tabs, chunks[0]);
match app.input_mode {
@ -652,8 +678,7 @@ fn ui(f: &mut Frame, app: &mut App) {
} else {
"Applying..."
};
let help = Paragraph::new(help_text)
.block(Block::default().borders(Borders::ALL));
let help = Paragraph::new(help_text).block(Block::default().borders(Borders::ALL));
f.render_widget(help, chunks[2]);
}
},
@ -663,10 +688,7 @@ fn ui(f: &mut Frame, app: &mut App) {
fn render_apply_progress(f: &mut Frame, app: &App, area: ratatui::layout::Rect) {
let chunks = Layout::default()
.direction(Direction::Vertical)
.constraints([
Constraint::Length(3),
Constraint::Min(0),
])
.constraints([Constraint::Length(3), Constraint::Min(0)])
.split(area);
// Progress bar
@ -697,7 +719,10 @@ fn render_apply_progress(f: &mut Frame, app: &App, area: ratatui::layout::Rect)
LogLevel::Success => Color::Green,
LogLevel::Error => Color::Red,
};
ListItem::new(Line::from(Span::styled(msg.as_str(), Style::default().fg(color))))
ListItem::new(Line::from(Span::styled(
msg.as_str(),
Style::default().fg(color),
)))
})
.collect();
@ -707,8 +732,7 @@ fn render_apply_progress(f: &mut Frame, app: &App, area: ratatui::layout::Rect)
"Applying..."
};
let list = List::new(items)
.block(Block::default().borders(Borders::ALL).title(title));
let list = List::new(items).block(Block::default().borders(Borders::ALL).title(title));
f.render_widget(list, chunks[1]);
}
@ -782,35 +806,63 @@ fn render_secrets(f: &mut Frame, area: ratatui::layout::Rect) {
}
fn render_status(f: &mut Frame, app: &App, area: ratatui::layout::Rect) {
let synced = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Synced)).count();
let pending = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Pending)).count();
let modified = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Modified)).count();
let errors = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Error)).count();
let synced = app
.dotfiles
.iter()
.filter(|d| matches!(d.status, FileStatus::Synced))
.count();
let pending = app
.dotfiles
.iter()
.filter(|d| matches!(d.status, FileStatus::Pending))
.count();
let modified = app
.dotfiles
.iter()
.filter(|d| matches!(d.status, FileStatus::Modified))
.count();
let errors = app
.dotfiles
.iter()
.filter(|d| matches!(d.status, FileStatus::Error))
.count();
let installed = app.packages.iter().filter(|p| p.installed).count();
let text = format!(
"Source: {}\n\nDotfiles:\n Synced: {}\n Pending: {}\n Modified: {}\n Errors: {}\n\nPackages:\n Installed: {}/{}",
app.source_dir.display(),
synced, pending, modified, errors, installed, app.packages.len()
synced,
pending,
modified,
errors,
installed,
app.packages.len()
);
let paragraph = Paragraph::new(text)
.block(Block::default().borders(Borders::ALL).title("Status"));
let paragraph =
Paragraph::new(text).block(Block::default().borders(Borders::ALL).title("Status"));
f.render_widget(paragraph, area);
}
fn render_sudo_prompt(f: &mut Frame, area: ratatui::layout::Rect) {
let text = "Package installation requires sudo privileges.\n\nDo you want to enter your password?";
let paragraph = Paragraph::new(text)
.block(Block::default().borders(Borders::ALL).title("Sudo Required"));
let text =
"Package installation requires sudo privileges.\n\nDo you want to enter your password?";
let paragraph = Paragraph::new(text).block(
Block::default()
.borders(Borders::ALL)
.title("Sudo Required"),
);
f.render_widget(paragraph, area);
}
fn render_password_input(f: &mut Frame, app: &App, area: ratatui::layout::Rect) {
let masked: String = "*".repeat(app.password_input.len());
let text = format!("Password: {}_", masked);
let paragraph = Paragraph::new(text)
.block(Block::default().borders(Borders::ALL).title("Enter sudo password"));
let paragraph = Paragraph::new(text).block(
Block::default()
.borders(Borders::ALL)
.title("Enter sudo password"),
);
f.render_widget(paragraph, area);
}
@ -837,7 +889,7 @@ fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> std::io::Result<()> {
let dst_path = dst.join(entry.file_name());
if ty.is_dir() {
copy_dir_recursive(&src_path.into(), &dst_path)?;
copy_dir_recursive(&src_path, &dst_path)?;
} else {
std::fs::copy(&src_path, &dst_path)?;
}

View file

@ -118,9 +118,7 @@ fn main() -> anyhow::Result<()> {
Commands::Encrypt { file, recipient } => {
commands::encrypt::run(file, recipient, cli.verbose)
}
Commands::Decrypt { file, identity } => {
commands::decrypt::run(file, identity, cli.verbose)
}
Commands::Decrypt { file, identity } => commands::decrypt::run(file, identity, cli.verbose),
Commands::Package { action } => match action {
PackageAction::Install => commands::package::install(cli.config, cli.verbose),
PackageAction::Update => commands::package::update(cli.verbose),

View file

@ -72,18 +72,29 @@ fn test_init_creates_structure() {
assert!(output.status.success(), "init failed: {:?}", output);
assert!(sandbox.config_file().exists(), "config file not created");
assert!(sandbox.config_dir().join("config").exists(), "config dir not created");
assert!(sandbox.state_dir().join("backups").exists(), "backups dir not created");
assert!(sandbox.state_dir().join("snapshots").exists(), "snapshots dir not created");
assert!(
sandbox.config_dir().join("config").exists(),
"config dir not created"
);
assert!(
sandbox.state_dir().join("backups").exists(),
"backups dir not created"
);
assert!(
sandbox.state_dir().join("snapshots").exists(),
"snapshots dir not created"
);
}
#[test]
fn test_check_valid_config() {
let sandbox = Sandbox::new("check-valid");
sandbox.write_config(r#"
sandbox.write_config(
r#"
package: "ripgrep"
package: "fd"
"#);
"#,
);
let output = sandbox.run(&["check"]);
assert!(output.status.success(), "check failed: {:?}", output);
@ -92,11 +103,13 @@ package: "fd"
#[test]
fn test_apply_dry_run() {
let sandbox = Sandbox::new("apply-dry");
sandbox.write_config(r#"
sandbox.write_config(
r#"
dotfile:
source = "config/test.conf"
target = "~/.config/test/test.conf"
"#);
"#,
);
sandbox.write_source("config/test.conf", "test content");
let output = sandbox.run(&["apply", "-n"]);
@ -109,12 +122,14 @@ dotfile:
#[test]
fn test_apply_creates_symlink() {
let sandbox = Sandbox::new("apply-symlink");
sandbox.write_config(r#"
sandbox.write_config(
r#"
dotfile:
source = "config/app.conf"
target = "~/.config/app/app.conf"
deploy = "link"
"#);
"#,
);
sandbox.write_source("config/app.conf", "app config content");
let output = sandbox.run(&["apply"]);
@ -147,17 +162,22 @@ fn test_apply_unchanged_on_rerun() {
// Second apply should succeed (symlink already exists and points correctly)
let target = sandbox.path.join(".config/app/app.conf");
assert!(target.is_symlink(), "target should still be symlink after second apply");
assert!(
target.is_symlink(),
"target should still be symlink after second apply"
);
}
#[test]
fn test_apply_creates_copy() {
let sandbox = Sandbox::new("apply-copy");
sandbox.write_config(r#"
sandbox.write_config(
r#"
dotfile:
source = "config/app.conf"
target = "~/.config/app/app.conf"
"#);
"#,
);
sandbox.write_source("config/app.conf", "app config content");
let output = sandbox.run(&["apply"]);
@ -165,7 +185,10 @@ dotfile:
let target = sandbox.path.join(".config/app/app.conf");
assert!(target.exists(), "target should exist");
assert!(!target.is_symlink(), "target should be a copy, not a symlink");
assert!(
!target.is_symlink(),
"target should be a copy, not a symlink"
);
let content = std::fs::read_to_string(&target).unwrap();
assert_eq!(content, "app config content", "content should match source");
@ -193,11 +216,13 @@ fn test_apply_copy_unchanged_on_rerun() {
#[test]
fn test_status_shows_state() {
let sandbox = Sandbox::new("status");
sandbox.write_config(r#"
sandbox.write_config(
r#"
dotfile:
source = "config/app.conf"
target = "~/.config/app/app.conf"
"#);
"#,
);
sandbox.write_source("config/app.conf", "content");
sandbox.run(&["apply"]);
@ -208,16 +233,22 @@ dotfile:
#[test]
fn test_snapshot_and_rollback() {
let sandbox = Sandbox::new("snapshot");
sandbox.write_config(r#"
sandbox.write_config(
r#"
dotfile:
source = "config/app.conf"
target = "~/.config/app/app.conf"
"#);
"#,
);
sandbox.write_source("config/app.conf", "v1");
sandbox.run(&["apply"]);
let snap_output = sandbox.run(&["snapshot", "v1"]);
assert!(snap_output.status.success(), "snapshot failed: {:?}", snap_output);
assert!(
snap_output.status.success(),
"snapshot failed: {:?}",
snap_output
);
let snapshot_file = sandbox.state_dir().join("snapshots/v1.json");
assert!(snapshot_file.exists(), "snapshot file not created");
@ -240,7 +271,10 @@ fn test_dotfile_with_when_condition() {
assert!(output.status.success(), "apply failed: {:?}", output);
let target = sandbox.path.join(".config/test.conf");
assert!(target.exists(), "file should be deployed when condition is true");
assert!(
target.exists(),
"file should be deployed when condition is true"
);
}
#[test]
@ -259,17 +293,22 @@ fn test_dotfile_when_false_skips() {
assert!(output.status.success(), "apply failed: {:?}", output);
let target = sandbox.path.join(".config/skip.conf");
assert!(!target.exists(), "file should NOT be deployed when condition is false");
assert!(
!target.exists(),
"file should NOT be deployed when condition is false"
);
}
#[test]
fn test_diff_shows_changes() {
let sandbox = Sandbox::new("diff");
sandbox.write_config(r#"
sandbox.write_config(
r#"
dotfile:
source = "config/app.conf"
target = "~/.config/app/app.conf"
"#);
"#,
);
sandbox.write_source("config/app.conf", "new content");
let target_dir = sandbox.path.join(".config/app");

View file

@ -22,3 +22,5 @@ anyhow.workspace = true
hostname = "0.4"
regex-lite = "0.1"
glob = "0.3"
minijinja = { version = "2", features = ["builtins"] }
which = "7"

View file

@ -72,7 +72,7 @@ impl Config {
return PathBuf::from(doot_home).join(".local/state/doot");
}
dirs::state_dir()
.or_else(|| dirs::data_local_dir())
.or_else(dirs::data_local_dir)
.unwrap_or_else(|| Self::home_dir().join(".local/state"))
.join("doot")
}

View file

@ -5,8 +5,8 @@ pub mod linker;
pub mod template;
use crate::config::Config;
use crate::state::store::DeployMode;
use crate::state::StateStore;
use crate::state::store::DeployMode;
use doot_lang::evaluator::DotfileConfig;
use glob::Pattern;
use std::path::{Path, PathBuf};
@ -181,12 +181,11 @@ impl Deployer {
}
// For files or link mode, handle as before
if target.exists() && !target.is_symlink() {
if !self.config.dry_run {
if target.exists() && !target.is_symlink()
&& !self.config.dry_run {
self.backup_existing(target)?;
std::fs::remove_file(target)?;
}
}
let action = if dotfile.template {
self.deploy_template(&source, target)?
@ -198,18 +197,21 @@ impl Deployer {
};
// Set permissions if specified (only for copy mode, symlinks inherit from source)
if !dotfile.permissions.is_empty() && deploy_mode == DeployMode::Copy && !self.config.dry_run {
if !dotfile.permissions.is_empty()
&& deploy_mode == DeployMode::Copy
&& !self.config.dry_run
{
apply_permissions(target, &dotfile.permissions)?;
}
// Set owner if specified
if let Some(ref owner) = dotfile.owner {
if !self.config.dry_run {
if let Some(ref owner) = dotfile.owner
&& !self.config.dry_run {
set_owner(target, owner)?;
}
}
self.state.record_deployment(&source, target, deploy_mode);
self.state
.record_deployment_with_template(&source, target, deploy_mode, dotfile.template);
Ok(DeployedFile {
source: source.clone(),
@ -242,9 +244,7 @@ impl Deployer {
for (src_file, tgt_file, status) in changed_files {
match status {
SyncStatus::NotDeployed
| SyncStatus::TargetMissing
| SyncStatus::SourceChanged => {
SyncStatus::NotDeployed | SyncStatus::TargetMissing | SyncStatus::SourceChanged => {
// Copy from source to target
if !self.config.dry_run {
if let Some(parent) = tgt_file.parent() {
@ -264,12 +264,14 @@ impl Deployer {
any_updated = true;
}
self.state.record_deployment(&src_file, &tgt_file, deploy_mode);
self.state
.record_deployment(&src_file, &tgt_file, deploy_mode);
}
SyncStatus::TargetChanged => {
// Target changed but source didn't - keep target, just update state
// This is like keeping local changes in git
self.state.record_deployment(&src_file, &tgt_file, deploy_mode);
self.state
.record_deployment(&src_file, &tgt_file, deploy_mode);
}
SyncStatus::Conflict => {
// Real conflict - user already chose "use source" at directory level
@ -284,7 +286,8 @@ impl Deployer {
}
}
any_updated = true;
self.state.record_deployment(&src_file, &tgt_file, deploy_mode);
self.state
.record_deployment(&src_file, &tgt_file, deploy_mode);
}
SyncStatus::SourceMissing => {
// File was deleted from source, remove from target
@ -301,11 +304,10 @@ impl Deployer {
}
// Set owner if specified (for entire directory)
if let Some(ref owner) = dotfile.owner {
if !self.config.dry_run {
if let Some(ref owner) = dotfile.owner
&& !self.config.dry_run {
set_owner(target, owner)?;
}
}
// Also record the directory-level deployment for sync status checks
self.state.record_deployment(source, target, deploy_mode);
@ -340,22 +342,20 @@ impl Deployer {
match base_mode {
DeployMode::Copy => {
for pattern in &dotfile.link_patterns {
if let Ok(p) = Pattern::new(pattern) {
if p.matches(&relative_path) {
if let Ok(p) = Pattern::new(pattern)
&& p.matches(&relative_path) {
return DeployMode::Link;
}
}
}
DeployMode::Copy
}
DeployMode::Link => {
for pattern in &dotfile.copy_patterns {
if let Ok(p) = Pattern::new(pattern) {
if p.matches(&relative_path) {
if let Ok(p) = Pattern::new(pattern)
&& p.matches(&relative_path) {
return DeployMode::Copy;
}
}
}
DeployMode::Link
}
}
@ -523,8 +523,8 @@ fn apply_permissions_recursive(
break;
}
PermissionRule::Pattern { pattern, mode } => {
if let Ok(p) = Pattern::new(pattern) {
if p.matches(&relative) {
if let Ok(p) = Pattern::new(pattern)
&& p.matches(&relative) {
set_file_permissions(&path, *mode)?;
break;
}
@ -533,7 +533,6 @@ fn apply_permissions_recursive(
}
}
}
}
Ok(())
}
@ -577,7 +576,10 @@ fn set_owner(path: &Path, owner: &str) -> Result<(), DeployError> {
if !sudo_output.status.success() {
let stderr = String::from_utf8_lossy(&sudo_output.stderr);
return Err(DeployError::ChownFailed(path.to_path_buf(), stderr.to_string()));
return Err(DeployError::ChownFailed(
path.to_path_buf(),
stderr.to_string(),
));
}
Ok(())

View file

@ -1,68 +1,47 @@
//! Template rendering for dotfiles.
//! Template rendering for dotfiles using MiniJinja.
use minijinja::{Environment, Value};
use std::collections::HashMap;
use std::path::PathBuf;
/// Renders templates with variable substitution.
/// Renders templates with Jinja2-style syntax.
pub struct TemplateEngine {
variables: HashMap<String, String>,
env: Environment<'static>,
variables: HashMap<String, Value>,
}
impl TemplateEngine {
/// Creates a new engine with default variables.
/// Creates a new engine with default variables and functions.
pub fn new() -> Self {
let mut variables = HashMap::new();
let mut env = Environment::new();
if let Some(home) = dirs::home_dir() {
variables.insert("home".to_string(), home.display().to_string());
}
if let Some(config) = dirs::config_dir() {
variables.insert("config_dir".to_string(), config.display().to_string());
}
if let Some(data) = dirs::data_dir() {
variables.insert("data_dir".to_string(), data.display().to_string());
}
if let Some(cache) = dirs::cache_dir() {
variables.insert("cache_dir".to_string(), cache.display().to_string());
}
// Register custom functions
register_functions(&mut env);
variables.insert("os".to_string(), std::env::consts::OS.to_string());
variables.insert("arch".to_string(), std::env::consts::ARCH.to_string());
// Build default variables
let variables = build_default_variables();
if let Ok(hostname) = hostname::get() {
variables.insert("hostname".to_string(), hostname.to_string_lossy().to_string());
}
for (key, value) in std::env::vars() {
variables.insert(format!("env.{}", key), value);
}
Self { variables }
Self { env, variables }
}
/// Sets a template variable.
pub fn set_variable(&mut self, key: String, value: String) {
self.variables.insert(key, value);
pub fn set_variable(&mut self, key: String, value: impl Into<Value>) {
self.variables.insert(key, value.into());
}
/// Renders a template string.
pub fn render(&self, template: &str) -> Result<String, String> {
let mut result = template.to_string();
// Add template to environment
let tmpl = self
.env
.template_from_str(template)
.map_err(|e| format!("template parse error: {}", e))?;
for (key, value) in &self.variables {
result = result.replace(&format!("{{{{ {} }}}}", key), value);
result = result.replace(&format!("{{{{{}}}}}", key), value);
}
// Build context from variables
let ctx = Value::from_iter(self.variables.clone());
let re = regex_lite::Regex::new(r"\{\{[^}]+\}\}").unwrap();
if re.is_match(&result) {
let unresolved: Vec<&str> = re.find_iter(&result).map(|m| m.as_str()).collect();
return Err(format!(
"unresolved template variables: {}",
unresolved.join(", ")
));
}
Ok(result)
tmpl.render(ctx)
.map_err(|e| format!("template render error: {}", e))
}
}
@ -71,3 +50,532 @@ impl Default for TemplateEngine {
Self::new()
}
}
/// Builds the default template variables.
fn build_default_variables() -> HashMap<String, Value> {
let mut vars = HashMap::new();
// Directory paths
if let Some(home) = dirs::home_dir() {
vars.insert("home".to_string(), Value::from(home.display().to_string()));
}
if let Some(config) = dirs::config_dir() {
vars.insert(
"config_dir".to_string(),
Value::from(config.display().to_string()),
);
}
if let Some(data) = dirs::data_dir() {
vars.insert(
"data_dir".to_string(),
Value::from(data.display().to_string()),
);
}
if let Some(cache) = dirs::cache_dir() {
vars.insert(
"cache_dir".to_string(),
Value::from(cache.display().to_string()),
);
}
// System info
vars.insert("os".to_string(), Value::from(std::env::consts::OS));
vars.insert("arch".to_string(), Value::from(std::env::consts::ARCH));
if let Ok(hostname) = hostname::get() {
vars.insert(
"hostname".to_string(),
Value::from(hostname.to_string_lossy().to_string()),
);
}
// Detect Linux distro
if std::env::consts::OS == "linux"
&& let Some(distro) = detect_distro() {
vars.insert("distro".to_string(), Value::from(distro));
}
// Environment variables as a nested object
let env_vars: HashMap<String, Value> =
std::env::vars().map(|(k, v)| (k, Value::from(v))).collect();
vars.insert("env".to_string(), Value::from_iter(env_vars));
vars
}
/// Registers the custom functions exposed to templates.
///
/// Functions are grouped by area: file system, paths, commands,
/// environment, OS detection, parsing, hashing, and strings. All of
/// them are deliberately best-effort: failures surface as empty
/// strings / `false` / empty lists rather than render errors, so a
/// template stays renderable on any host.
fn register_functions(env: &mut Environment<'static>) {
    // ===== File System Functions =====

    // file_exists(path) - check if a file or directory exists
    env.add_function("file_exists", |path: String| -> bool {
        std::path::Path::new(&expand_path(&path)).exists()
    });

    // dir_exists(path) - check if a directory exists
    env.add_function("dir_exists", |path: String| -> bool {
        std::path::Path::new(&expand_path(&path)).is_dir()
    });

    // is_symlink(path) - check if path is a symlink
    env.add_function("is_symlink", |path: String| -> bool {
        std::path::Path::new(&expand_path(&path)).is_symlink()
    });

    // read_link(path) - get symlink target
    env.add_function("read_link", |path: String| -> String {
        std::fs::read_link(expand_path(&path))
            .map(|p| p.display().to_string())
            .unwrap_or_default()
    });

    // read_file(path) - read file contents (returns empty string on error)
    env.add_function("read_file", |path: String| -> String {
        std::fs::read_to_string(expand_path(&path)).unwrap_or_default()
    });

    // read_file_lines(path) - read file as list of lines
    env.add_function("read_file_lines", |path: String| -> Vec<String> {
        std::fs::read_to_string(expand_path(&path))
            .map(|s| s.lines().map(|l| l.to_string()).collect())
            .unwrap_or_default()
    });

    // list_dir(path) - list directory contents
    env.add_function("list_dir", |path: String| -> Vec<String> {
        std::fs::read_dir(expand_path(&path))
            .map(|entries| {
                entries
                    .filter_map(|e| e.ok())
                    .map(|e| e.path().display().to_string())
                    .collect()
            })
            .unwrap_or_default()
    });

    // glob(pattern) - find files matching glob pattern
    env.add_function("glob", |pattern: String| -> Vec<String> {
        glob::glob(&pattern)
            .map(|paths| {
                paths
                    .filter_map(|p| p.ok())
                    .map(|p| p.display().to_string())
                    .collect()
            })
            .unwrap_or_default()
    });

    // temp_dir() - get temp directory path
    env.add_function("temp_dir", || -> String {
        std::env::temp_dir().display().to_string()
    });

    // ===== Path Functions =====

    // path_join(a, b, ...) - join path components
    env.add_function("path_join", |parts: Vec<String>| -> String {
        let mut result = PathBuf::new();
        for part in parts {
            result.push(part);
        }
        result.display().to_string()
    });

    // path_parent(path) - get parent directory
    env.add_function("path_parent", |path: String| -> String {
        std::path::Path::new(&path)
            .parent()
            .map(|p| p.display().to_string())
            .unwrap_or_default()
    });

    // path_filename(path) - get filename component
    env.add_function("path_filename", |path: String| -> String {
        std::path::Path::new(&path)
            .file_name()
            .map(|s| s.to_string_lossy().to_string())
            .unwrap_or_default()
    });

    // path_extension(path) - get file extension
    env.add_function("path_extension", |path: String| -> String {
        std::path::Path::new(&path)
            .extension()
            .map(|s| s.to_string_lossy().to_string())
            .unwrap_or_default()
    });

    // config_path(app) - get config directory for an app
    env.add_function("config_path", |app: String| -> String {
        dirs::config_dir()
            .map(|p| p.join(&app).display().to_string())
            .unwrap_or_default()
    });

    // ===== Command/Process Functions =====

    // command_exists(cmd) - check if a command is available in PATH
    env.add_function("command_exists", |cmd: String| -> bool {
        which::which(&cmd).is_ok()
    });

    // which(cmd) - get full path to command (empty string if not found)
    env.add_function("which", |cmd: String| -> String {
        which::which(&cmd)
            .map(|p| p.display().to_string())
            .unwrap_or_default()
    });

    // shell(cmd) - execute a shell command and return output (trimmed)
    // NOTE: stderr is discarded and spawn failures yield "".
    env.add_function("shell", |cmd: String| -> String {
        std::process::Command::new("sh")
            .arg("-c")
            .arg(&cmd)
            .output()
            .map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
            .unwrap_or_default()
    });

    // shell_status(cmd) - execute command and return exit status
    // (-1 when the process could not be spawned or was killed by a signal)
    env.add_function("shell_status", |cmd: String| -> i32 {
        std::process::Command::new("sh")
            .arg("-c")
            .arg(&cmd)
            .status()
            .map(|s| s.code().unwrap_or(-1))
            .unwrap_or(-1)
    });

    // ===== Environment Functions =====

    // get_env(name, default?) - get environment variable with optional default
    env.add_function(
        "get_env",
        |name: String, default: Option<String>| -> String {
            std::env::var(&name).unwrap_or_else(|_| default.unwrap_or_default())
        },
    );

    // ===== OS Detection Functions =====

    // is_macos() - convenience check
    env.add_function("is_macos", || -> bool { std::env::consts::OS == "macos" });

    // is_linux() - convenience check
    env.add_function("is_linux", || -> bool { std::env::consts::OS == "linux" });

    // is_windows() - convenience check
    env.add_function("is_windows", || -> bool {
        std::env::consts::OS == "windows"
    });

    // is_arch() - check if running Arch Linux
    env.add_function("is_arch", || -> bool {
        detect_distro().map(|d| d == "arch").unwrap_or(false)
    });

    // is_ubuntu() - check if running Ubuntu
    env.add_function("is_ubuntu", || -> bool {
        detect_distro().map(|d| d == "ubuntu").unwrap_or(false)
    });

    // is_fedora() - check if running Fedora
    env.add_function("is_fedora", || -> bool {
        detect_distro().map(|d| d == "fedora").unwrap_or(false)
    });

    // is_debian() - check if running Debian
    env.add_function("is_debian", || -> bool {
        detect_distro().map(|d| d == "debian").unwrap_or(false)
    });

    // is_nixos() - check if running NixOS
    env.add_function("is_nixos", || -> bool {
        detect_distro().map(|d| d == "nixos").unwrap_or(false)
    });

    // is_nix() - check if nix package manager is available
    env.add_function("is_nix", || -> bool {
        // Check for nix command or /nix directory
        which::which("nix").is_ok() || std::path::Path::new("/nix").exists()
    });

    // hostname_matches(pattern) - check if hostname matches a pattern
    env.add_function("hostname_matches", |pattern: String| -> bool {
        if let Ok(hostname) = hostname::get() {
            let hostname = hostname.to_string_lossy();
            if pattern.contains('*') {
                // Glob-style matching. Escape regex metacharacters first so
                // that literal dots in the pattern (common in hostnames)
                // don't act as wildcards, then turn each escaped '*' into
                // the regex ".*".
                let regex_pattern =
                    format!("^{}$", regex_lite::escape(&pattern).replace("\\*", ".*"));
                regex_lite::Regex::new(&regex_pattern)
                    .map(|re| re.is_match(&hostname))
                    .unwrap_or(false)
            } else {
                hostname == pattern
            }
        } else {
            false
        }
    });

    // ===== Parsing Functions =====

    // from_json(str) - parse JSON string into object
    env.add_function("from_json", |s: String| -> Value {
        serde_json::from_str::<serde_json::Value>(&s)
            .map(|v| json_to_minijinja(&v))
            .unwrap_or(Value::UNDEFINED)
    });

    // from_toml(str) - parse TOML string into object
    env.add_function("from_toml", |s: String| -> Value {
        toml::from_str::<toml::Value>(&s)
            .map(|v| toml_to_minijinja(&v))
            .unwrap_or(Value::UNDEFINED)
    });

    // ===== Crypto Functions =====

    // hash_str(str) - hash a string using BLAKE3
    env.add_function("hash_str", |s: String| -> String {
        blake3::hash(s.as_bytes()).to_hex().to_string()
    });

    // hash_file(path) - hash a file using BLAKE3 (empty string on read error)
    env.add_function("hash_file", |path: String| -> String {
        std::fs::read(expand_path(&path))
            .map(|content| blake3::hash(&content).to_hex().to_string())
            .unwrap_or_default()
    });

    // ===== String Functions =====

    // starts_with(str, prefix) - check if string starts with prefix
    env.add_function("starts_with", |s: String, prefix: String| -> bool {
        s.starts_with(&prefix)
    });

    // ends_with(str, suffix) - check if string ends with suffix
    env.add_function("ends_with", |s: String, suffix: String| -> bool {
        s.ends_with(&suffix)
    });

    // contains(str, needle) - check if string contains needle
    env.add_function("contains", |s: String, needle: String| -> bool {
        s.contains(&needle)
    });
}
/// Expands a leading `~` to the user's home directory.
///
/// Only `~` by itself and `~/...` are expanded. A `~user` form is left
/// untouched rather than being (incorrectly) resolved relative to the
/// current user's home; resolving other users' homes is out of scope.
/// When the home directory cannot be determined, an empty base path is
/// used as a fallback.
fn expand_path(s: &str) -> PathBuf {
    match s.strip_prefix('~') {
        // Bare "~" -> the home directory itself.
        Some("") => dirs::home_dir().unwrap_or_default(),
        // "~/rest" -> home joined with "rest".
        Some(rest) if rest.starts_with('/') => {
            let home = dirs::home_dir().unwrap_or_default();
            home.join(&rest[1..])
        }
        // "~user..." or no tilde at all: keep the path as written.
        _ => PathBuf::from(s),
    }
}
/// Detects the Linux distribution.
///
/// Reads `/etc/os-release` and returns the value of its `ID=` field
/// with surrounding quotes stripped (e.g. "arch", "ubuntu"). Returns
/// `None` on non-Linux platforms or when the file is absent or lacks
/// an `ID=` line.
fn detect_distro() -> Option<String> {
    if std::env::consts::OS != "linux" {
        return None;
    }
    // Try /etc/os-release first
    let content = std::fs::read_to_string("/etc/os-release").ok()?;
    content
        .lines()
        .find_map(|line| line.strip_prefix("ID="))
        .map(|id| id.trim_matches('"').to_string())
}
/// Converts serde_json::Value to minijinja::Value.
fn json_to_minijinja(json: &serde_json::Value) -> Value {
match json {
serde_json::Value::Null => Value::from(()),
serde_json::Value::Bool(b) => Value::from(*b),
serde_json::Value::Number(n) => {
if let Some(i) = n.as_i64() {
Value::from(i)
} else if let Some(f) = n.as_f64() {
Value::from(f)
} else {
Value::from(())
}
}
serde_json::Value::String(s) => Value::from(s.clone()),
serde_json::Value::Array(arr) => {
Value::from(arr.iter().map(json_to_minijinja).collect::<Vec<_>>())
}
serde_json::Value::Object(obj) => {
let map: std::collections::BTreeMap<String, Value> = obj
.iter()
.map(|(k, v)| (k.clone(), json_to_minijinja(v)))
.collect();
Value::from_iter(map)
}
}
}
/// Converts toml::Value to minijinja::Value.
fn toml_to_minijinja(toml: &toml::Value) -> Value {
match toml {
toml::Value::Boolean(b) => Value::from(*b),
toml::Value::Integer(i) => Value::from(*i),
toml::Value::Float(f) => Value::from(*f),
toml::Value::String(s) => Value::from(s.clone()),
toml::Value::Array(arr) => {
Value::from(arr.iter().map(toml_to_minijinja).collect::<Vec<_>>())
}
toml::Value::Table(table) => {
let map: std::collections::BTreeMap<String, Value> = table
.iter()
.map(|(k, v)| (k.clone(), toml_to_minijinja(v)))
.collect();
Value::from_iter(map)
}
toml::Value::Datetime(dt) => Value::from(dt.to_string()),
}
}
#[cfg(test)]
mod tests {
    use super::*;

    // These tests exercise the engine against the real host (OS
    // constants, PATH lookups, environment variables), so several
    // expectations branch on `std::env::consts::OS`.

    #[test]
    fn test_simple_variable() {
        // `os` is always populated from std::env::consts::OS.
        let engine = TemplateEngine::new();
        let result = engine.render("Hello {{ os }}!").unwrap();
        assert!(result.contains("linux") || result.contains("macos") || result.contains("windows"));
    }

    #[test]
    fn test_if_else() {
        // Jinja conditionals can compare against the default variables.
        let engine = TemplateEngine::new();
        let template = r#"{% if os == "linux" %}Linux{% else %}Other{% endif %}"#;
        let result = engine.render(template).unwrap();
        if std::env::consts::OS == "linux" {
            assert_eq!(result, "Linux");
        } else {
            assert_eq!(result, "Other");
        }
    }

    #[test]
    fn test_command_exists() {
        let engine = TemplateEngine::new();
        // 'sh' should exist on all Unix systems
        let template = r#"{% if command_exists("sh") %}yes{% else %}no{% endif %}"#;
        let result = engine.render(template).unwrap();
        assert_eq!(result, "yes");
    }

    #[test]
    fn test_which() {
        // which() renders the resolved path, which contains the binary name.
        let engine = TemplateEngine::new();
        let result = engine.render("{{ which('sh') }}").unwrap();
        assert!(result.contains("sh"));
    }

    #[test]
    fn test_env_access() {
        // Env vars are snapshotted into the `env` object when the engine is
        // constructed, so the variable must be set before new().
        // SAFETY: This is a test running in isolation
        unsafe { std::env::set_var("DOOT_TEST_VAR", "test_value") };
        let engine = TemplateEngine::new();
        let result = engine.render("{{ env.DOOT_TEST_VAR }}").unwrap();
        assert_eq!(result, "test_value");
    }

    #[test]
    fn test_filters() {
        // Built-in MiniJinja filters are available unchanged.
        let engine = TemplateEngine::new();
        let result = engine.render("{{ 'hello' | upper }}").unwrap();
        assert_eq!(result, "HELLO");
    }

    #[test]
    fn test_for_loop() {
        // set_variable() makes a list iterable from a template.
        let mut engine = TemplateEngine::new();
        engine.set_variable("items".to_string(), Value::from(vec!["a", "b", "c"]));
        let result = engine
            .render("{% for i in items %}{{ i }}{% endfor %}")
            .unwrap();
        assert_eq!(result, "abc");
    }

    #[test]
    fn test_is_linux_function() {
        let engine = TemplateEngine::new();
        let result = engine
            .render("{% if is_linux() %}yes{% else %}no{% endif %}")
            .unwrap();
        if std::env::consts::OS == "linux" {
            assert_eq!(result, "yes");
        } else {
            assert_eq!(result, "no");
        }
    }

    #[test]
    fn test_default_filter() {
        // Unknown variables render via the `default` filter instead of erroring.
        let engine = TemplateEngine::new();
        let result = engine
            .render("{{ undefined_var | default('fallback') }}")
            .unwrap();
        assert_eq!(result, "fallback");
    }

    #[test]
    fn test_path_functions() {
        let engine = TemplateEngine::new();
        let result = engine
            .render("{{ path_filename('/foo/bar.txt') }}")
            .unwrap();
        assert_eq!(result, "bar.txt");
        let result = engine
            .render("{{ path_extension('/foo/bar.txt') }}")
            .unwrap();
        assert_eq!(result, "txt");
        let result = engine.render("{{ path_parent('/foo/bar.txt') }}").unwrap();
        assert_eq!(result, "/foo");
    }

    #[test]
    fn test_hash_str() {
        // Only the digest length is pinned; the exact hex value is not.
        let engine = TemplateEngine::new();
        let result = engine.render("{{ hash_str('hello') }}").unwrap();
        // BLAKE3 hash of "hello"
        assert_eq!(result.len(), 64); // 32 bytes = 64 hex chars
    }

    #[test]
    fn test_from_json() {
        // from_json() yields an object navigable with dot syntax.
        let engine = TemplateEngine::new();
        let template = r#"{% set data = from_json('{"name": "test", "count": 42}') %}{{ data.name }}-{{ data.count }}"#;
        let result = engine.render(template).unwrap();
        assert_eq!(result, "test-42");
    }

    #[test]
    fn test_starts_ends_with() {
        let engine = TemplateEngine::new();
        let result = engine
            .render("{% if starts_with('hello', 'he') %}yes{% endif %}")
            .unwrap();
        assert_eq!(result, "yes");
        let result = engine
            .render("{% if ends_with('hello', 'lo') %}yes{% endif %}")
            .unwrap();
        assert_eq!(result, "yes");
    }
}

View file

@ -91,10 +91,9 @@ impl AgeEncryption {
/// Decrypts data using the configured identity.
pub fn decrypt(&self, data: &[u8]) -> Result<Vec<u8>, EncryptionError> {
let identity = self
.identity
.as_ref()
.ok_or_else(|| EncryptionError::DecryptionFailed("no identity configured".to_string()))?;
let identity = self.identity.as_ref().ok_or_else(|| {
EncryptionError::DecryptionFailed("no identity configured".to_string())
})?;
let decryptor = match age::Decryptor::new(data)
.map_err(|e| EncryptionError::DecryptionFailed(e.to_string()))?
@ -103,7 +102,7 @@ impl AgeEncryption {
_ => {
return Err(EncryptionError::DecryptionFailed(
"unexpected decryptor type".to_string(),
))
));
}
};

View file

@ -12,7 +12,7 @@ pub mod package;
pub mod state;
pub use config::Config;
pub use deploy::{Deployer, DeployResult};
pub use deploy::{DeployResult, Deployer};
pub use encryption::AgeEncryption;
pub use hooks::HookRunner;
pub use os::OsInfo;

View file

@ -135,7 +135,9 @@ fn command_exists(cmd: &str) -> bool {
} else {
// Fallback to hardcoded paths
let paths = ["/usr/bin/", "/usr/local/bin/", "/bin/"];
paths.iter().any(|p| std::path::Path::new(&format!("{}{}", p, cmd)).exists())
paths
.iter()
.any(|p| std::path::Path::new(&format!("{}{}", p, cmd)).exists())
};
cache.insert(cmd.to_string(), exists);

View file

@ -33,10 +33,7 @@ impl Apt {
}
let output = if self.use_sudo {
Command::new("sudo")
.arg("apt")
.args(args)
.output()?
Command::new("sudo").arg("apt").args(args).output()?
} else {
Command::new("apt").args(args).output()?
};
@ -136,9 +133,7 @@ impl PackageManager for Apt {
}
fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
let output = Command::new("dpkg")
.args(["-s", package])
.output()?;
let output = Command::new("dpkg").args(["-s", package]).output()?;
Ok(output.status.success())
}

View file

@ -82,9 +82,7 @@ impl PackageManager for Brew {
}
fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
let output = Command::new("brew")
.args(["list", package])
.output()?;
let output = Command::new("brew").args(["list", package]).output()?;
Ok(output.status.success())
}

View file

@ -62,7 +62,9 @@ pub trait PackageManager: Send + Sync {
/// Returns true if running in test mode (DOOT_TEST_MODE=1)
pub fn is_test_mode() -> bool {
std::env::var("DOOT_TEST_MODE").map(|v| v == "1").unwrap_or(false)
std::env::var("DOOT_TEST_MODE")
.map(|v| v == "1")
.unwrap_or(false)
}
/// Mock package manager for testing - doesn't actually install anything

View file

@ -33,10 +33,7 @@ impl Pacman {
}
let output = if self.use_sudo {
Command::new("sudo")
.arg("pacman")
.args(args)
.output()?
Command::new("sudo").arg("pacman").args(args).output()?
} else {
Command::new("pacman").args(args).output()?
};
@ -136,9 +133,7 @@ impl PackageManager for Pacman {
}
fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
let output = Command::new("pacman")
.args(["-Q", package])
.output()?;
let output = Command::new("pacman").args(["-Q", package]).output()?;
Ok(output.status.success())
}

View file

@ -84,9 +84,7 @@ impl PackageManager for Yay {
}
fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
let output = Command::new("yay")
.args(["-Q", package])
.output()?;
let output = Command::new("yay").args(["-Q", package]).output()?;
Ok(output.status.success())
}

View file

@ -56,12 +56,11 @@ impl Snapshot {
for entry in std::fs::read_dir(snapshot_dir)? {
let entry = entry?;
let path = entry.path();
if path.extension().map(|e| e == "json").unwrap_or(false) {
if let Some(name) = path.file_stem() {
if path.extension().map(|e| e == "json").unwrap_or(false)
&& let Some(name) = path.file_stem() {
snapshots.push(name.to_string_lossy().to_string());
}
}
}
snapshots.sort();
Ok(snapshots)

View file

@ -41,6 +41,9 @@ pub struct DeploymentRecord {
pub target_hash: String,
pub deployed_at: String,
pub mode: DeployMode,
/// Whether this file was deployed as a template (source != target content).
#[serde(default)]
pub template: bool,
}
/// Sync status after comparing current hashes with recorded state.
@ -91,6 +94,17 @@ impl StateStore {
/// Records a deployment with both source and target hashes.
pub fn record_deployment(&mut self, source: &Path, target: &Path, mode: DeployMode) {
self.record_deployment_with_template(source, target, mode, false);
}
/// Records a deployment with template flag.
pub fn record_deployment_with_template(
&mut self,
source: &Path,
target: &Path,
mode: DeployMode,
template: bool,
) {
let source_hash = hash_path(source);
let target_hash = hash_path(target);
@ -101,6 +115,7 @@ impl StateStore {
target_hash,
deployed_at: chrono_now(),
mode,
template,
};
self.state
@ -111,10 +126,43 @@ impl StateStore {
/// Checks sync status by comparing current hashes with recorded state.
pub fn check_sync_status(&self, source: &Path, target: &Path) -> SyncStatus {
self.check_sync_status_with_config(source, target, None, None)
}
/// Checks sync status, also detecting if template flag changed in config.
pub fn check_sync_status_with_template(
&self,
source: &Path,
target: &Path,
current_template: Option<bool>,
) -> SyncStatus {
self.check_sync_status_with_config(source, target, current_template, None)
}
/// Checks sync status, also detecting if config flags changed.
pub fn check_sync_status_with_config(
&self,
source: &Path,
target: &Path,
current_template: Option<bool>,
current_mode: Option<DeployMode>,
) -> SyncStatus {
let Some(record) = self.get_deployment(target) else {
return SyncStatus::NotDeployed;
};
// If template flag changed in config, force re-deploy
if let Some(is_template) = current_template
&& is_template != record.template {
return SyncStatus::SourceChanged;
}
// If deploy mode changed in config, force re-deploy
if let Some(mode) = current_mode
&& mode != record.mode {
return SyncStatus::SourceChanged;
}
if !source.exists() {
return SyncStatus::SourceMissing;
}
@ -124,6 +172,11 @@ impl StateStore {
// If stored hashes are empty (legacy record), treat as needing re-sync
if record.source_hash.is_empty() || record.target_hash.is_empty() {
// For templates, we can't compare source to target directly
if record.template {
return SyncStatus::SourceChanged;
}
let current_source_hash = hash_path(source);
let current_target_hash = hash_path(target);

View file

@ -36,7 +36,7 @@ pub enum Statement {
TypeAlias(TypeAlias),
Import(Import),
Dotfile(Dotfile),
Package(Package),
Package(Box<Package>),
Secret(Secret),
Hook(Hook),
MacroDecl(MacroDecl),

View file

@ -11,7 +11,11 @@ pub fn race(args: &[Value]) -> Result<Value, EvalError> {
pub fn fetch(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("fetch expects a URL string".to_string())),
_ => {
return Err(EvalError::TypeError(
"fetch expects a URL string".to_string(),
));
}
};
smol::block_on(async {
@ -31,7 +35,11 @@ pub fn fetch(args: &[Value]) -> Result<Value, EvalError> {
pub fn fetch_json(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("fetch_json expects a URL string".to_string())),
_ => {
return Err(EvalError::TypeError(
"fetch_json expects a URL string".to_string(),
));
}
};
smol::block_on(async {
@ -51,7 +59,11 @@ pub fn fetch_json(args: &[Value]) -> Result<Value, EvalError> {
pub fn fetch_bytes(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("fetch_bytes expects a URL string".to_string())),
_ => {
return Err(EvalError::TypeError(
"fetch_bytes expects a URL string".to_string(),
));
}
};
smol::block_on(async {
@ -72,7 +84,11 @@ pub fn fetch_bytes(args: &[Value]) -> Result<Value, EvalError> {
pub fn post(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("post expects a URL string".to_string())),
_ => {
return Err(EvalError::TypeError(
"post expects a URL string".to_string(),
));
}
};
let body = match args.get(1) {
@ -98,7 +114,11 @@ pub fn post(args: &[Value]) -> Result<Value, EvalError> {
pub fn post_json(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("post_json expects a URL string".to_string())),
_ => {
return Err(EvalError::TypeError(
"post_json expects a URL string".to_string(),
));
}
};
let data = args.get(1).unwrap_or(&Value::None);
@ -123,13 +143,21 @@ pub fn post_json(args: &[Value]) -> Result<Value, EvalError> {
pub fn download(args: &[Value]) -> Result<Value, EvalError> {
let url = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("download expects a URL string".to_string())),
_ => {
return Err(EvalError::TypeError(
"download expects a URL string".to_string(),
));
}
};
let path = match args.get(1) {
Some(Value::Path(p)) => p.clone(),
Some(Value::Str(s)) => std::path::PathBuf::from(s),
_ => return Err(EvalError::TypeError("download requires destination path".to_string())),
_ => {
return Err(EvalError::TypeError(
"download requires destination path".to_string(),
));
}
};
smol::block_on(async {
@ -161,9 +189,7 @@ fn json_to_value(json: &serde_json::Value) -> Value {
}
}
serde_json::Value::String(s) => Value::Str(s.clone()),
serde_json::Value::Array(arr) => {
Value::List(arr.iter().map(json_to_value).collect())
}
serde_json::Value::Array(arr) => Value::List(arr.iter().map(json_to_value).collect()),
serde_json::Value::Object(obj) => {
let fields: indexmap::IndexMap<String, Value> = obj
.iter()
@ -183,9 +209,7 @@ fn value_to_json(val: &Value) -> serde_json::Value {
Value::Str(s) => serde_json::Value::String(s.clone()),
Value::Bool(b) => serde_json::Value::Bool(*b),
Value::Path(p) => serde_json::Value::String(p.display().to_string()),
Value::List(items) => {
serde_json::Value::Array(items.iter().map(value_to_json).collect())
}
Value::List(items) => serde_json::Value::Array(items.iter().map(value_to_json).collect()),
Value::Struct(_, fields) => {
let map: serde_json::Map<String, serde_json::Value> = fields
.iter()

View file

@ -4,8 +4,17 @@ use crate::evaluator::{EvalError, Evaluator, Value};
pub fn map(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
let list = match args.first() {
Some(Value::List(items)) => items.clone(),
Some(v) => return Err(EvalError::TypeError(format!("map expects list, got {}", v.type_name()))),
None => return Err(EvalError::TypeError("map requires a list argument".to_string())),
Some(v) => {
return Err(EvalError::TypeError(format!(
"map expects list, got {}",
v.type_name()
)));
}
None => {
return Err(EvalError::TypeError(
"map requires a list argument".to_string(),
));
}
};
match args.get(1) {
@ -34,11 +43,24 @@ pub fn map(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<
}
}
pub fn filter(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
pub fn filter(
eval: &mut Evaluator,
args: &[Value],
_arg_exprs: &[Expr],
) -> Result<Value, EvalError> {
let list = match args.first() {
Some(Value::List(items)) => items.clone(),
Some(v) => return Err(EvalError::TypeError(format!("filter expects list, got {}", v.type_name()))),
None => return Err(EvalError::TypeError("filter requires a list argument".to_string())),
Some(v) => {
return Err(EvalError::TypeError(format!(
"filter expects list, got {}",
v.type_name()
)));
}
None => {
return Err(EvalError::TypeError(
"filter requires a list argument".to_string(),
));
}
};
match args.get(1) {
@ -60,22 +82,33 @@ pub fn filter(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Resu
Some(Value::Function(func, func_env)) => {
let mut results = Vec::new();
for item in list {
let result = eval.call_fn(func, func_env, &[item.clone()])?;
let result = eval.call_fn(func, func_env, std::slice::from_ref(&item))?;
if result.is_truthy() {
results.push(item);
}
}
Ok(Value::List(results))
}
_ => Err(EvalError::TypeError("filter requires a function".to_string())),
_ => Err(EvalError::TypeError(
"filter requires a function".to_string(),
)),
}
}
pub fn fold(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
let list = match args.first() {
Some(Value::List(items)) => items.clone(),
Some(v) => return Err(EvalError::TypeError(format!("fold expects list, got {}", v.type_name()))),
None => return Err(EvalError::TypeError("fold requires a list argument".to_string())),
Some(v) => {
return Err(EvalError::TypeError(format!(
"fold expects list, got {}",
v.type_name()
)));
}
None => {
return Err(EvalError::TypeError(
"fold requires a list argument".to_string(),
));
}
};
let init = args.get(1).cloned().unwrap_or(Value::None);
@ -136,13 +169,18 @@ pub fn concat(args: &[Value]) -> Result<Value, EvalError> {
pub fn zip(args: &[Value]) -> Result<Value, EvalError> {
if args.len() < 2 {
return Err(EvalError::TypeError("zip requires at least 2 lists".to_string()));
return Err(EvalError::TypeError(
"zip requires at least 2 lists".to_string(),
));
}
let lists: Result<Vec<&Vec<Value>>, _> = args.iter().map(|a| match a {
let lists: Result<Vec<&Vec<Value>>, _> = args
.iter()
.map(|a| match a {
Value::List(items) => Ok(items),
_ => Err(EvalError::TypeError("zip expects lists".to_string())),
}).collect();
})
.collect();
let lists = lists?;
let min_len = lists.iter().map(|l| l.len()).min().unwrap_or(0);
@ -189,7 +227,9 @@ pub fn len(args: &[Value]) -> Result<Value, EvalError> {
match args.first() {
Some(Value::List(items)) => Ok(Value::Int(items.len() as i64)),
Some(Value::Str(s)) => Ok(Value::Int(s.len() as i64)),
_ => Err(EvalError::TypeError("len expects a list or string".to_string())),
_ => Err(EvalError::TypeError(
"len expects a list or string".to_string(),
)),
}
}
@ -245,7 +285,11 @@ pub fn sort(args: &[Value]) -> Result<Value, EvalError> {
Ok(Value::List(sortable.into_iter().map(|(v, _)| v).collect()))
}
pub fn sort_by(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
pub fn sort_by(
eval: &mut Evaluator,
args: &[Value],
_arg_exprs: &[Expr],
) -> Result<Value, EvalError> {
let list = match args.first() {
Some(Value::List(items)) => items.clone(),
_ => return Err(EvalError::TypeError("sort_by expects a list".to_string())),
@ -266,7 +310,9 @@ pub fn sort_by(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Res
keyed.sort_by(|a, b| a.1.cmp(&b.1));
Ok(Value::List(keyed.into_iter().map(|(v, _)| v).collect()))
}
_ => Err(EvalError::TypeError("sort_by requires a function".to_string())),
_ => Err(EvalError::TypeError(
"sort_by requires a function".to_string(),
)),
}
}
@ -305,7 +351,11 @@ pub fn seq(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<
}
}
pub fn batch(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
pub fn batch(
eval: &mut Evaluator,
args: &[Value],
_arg_exprs: &[Expr],
) -> Result<Value, EvalError> {
let list = match args.first() {
Some(Value::List(items)) => items.clone(),
_ => return Err(EvalError::TypeError("batch expects a list".to_string())),
@ -313,7 +363,11 @@ pub fn batch(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Resul
let batch_size = match args.get(1) {
Some(Value::Int(n)) => *n as usize,
_ => return Err(EvalError::TypeError("batch requires batch size".to_string())),
_ => {
return Err(EvalError::TypeError(
"batch requires batch size".to_string(),
));
}
};
match args.get(2) {
@ -332,7 +386,9 @@ pub fn batch(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Resul
}
Ok(Value::List(results))
}
_ => Err(EvalError::TypeError("batch requires a function".to_string())),
_ => Err(EvalError::TypeError(
"batch requires a function".to_string(),
)),
}
}

View file

@ -16,7 +16,11 @@ pub fn hash_file(args: &[Value]) -> Result<Value, EvalError> {
pub fn hash_str(args: &[Value]) -> Result<Value, EvalError> {
let s = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("hash_str expects a string".to_string())),
_ => {
return Err(EvalError::TypeError(
"hash_str expects a string".to_string(),
));
}
};
let hash = blake3::hash(s.as_bytes());
@ -26,12 +30,20 @@ pub fn hash_str(args: &[Value]) -> Result<Value, EvalError> {
pub fn encrypt_age(args: &[Value]) -> Result<Value, EvalError> {
let content = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("encrypt_age expects content string".to_string())),
_ => {
return Err(EvalError::TypeError(
"encrypt_age expects content string".to_string(),
));
}
};
let recipient = match args.get(1) {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("encrypt_age requires recipient public key".to_string())),
_ => {
return Err(EvalError::TypeError(
"encrypt_age requires recipient public key".to_string(),
));
}
};
let recipient = recipient
@ -60,12 +72,20 @@ pub fn encrypt_age(args: &[Value]) -> Result<Value, EvalError> {
pub fn decrypt_age(args: &[Value]) -> Result<Value, EvalError> {
let encrypted = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("decrypt_age expects encrypted string".to_string())),
_ => {
return Err(EvalError::TypeError(
"decrypt_age expects encrypted string".to_string(),
));
}
};
let identity_str = match args.get(1) {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("decrypt_age requires identity".to_string())),
_ => {
return Err(EvalError::TypeError(
"decrypt_age requires identity".to_string(),
));
}
};
let identity = identity_str
@ -79,7 +99,11 @@ pub fn decrypt_age(args: &[Value]) -> Result<Value, EvalError> {
.map_err(|e| EvalError::TypeError(format!("decryption error: {}", e)))?
{
age::Decryptor::Recipients(d) => d,
_ => return Err(EvalError::TypeError("unexpected decryptor type".to_string())),
_ => {
return Err(EvalError::TypeError(
"unexpected decryptor type".to_string(),
));
}
};
let mut decrypted = vec![];
@ -92,10 +116,9 @@ pub fn decrypt_age(args: &[Value]) -> Result<Value, EvalError> {
.read_to_end(&mut decrypted)
.map_err(|e| EvalError::TypeError(format!("decryption error: {}", e)))?;
Ok(Value::Str(
String::from_utf8(decrypted)
.map_err(|e| EvalError::TypeError(format!("invalid UTF-8: {}", e)))?,
))
Ok(Value::Str(String::from_utf8(decrypted).map_err(|e| {
EvalError::TypeError(format!("invalid UTF-8: {}", e))
})?))
}
fn base64_encode(data: &[u8]) -> String {

View file

@ -20,7 +20,11 @@ pub fn write_file(args: &[Value]) -> Result<Value, EvalError> {
let path = get_path(args)?;
let content = match args.get(1) {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("write_file requires content string".to_string())),
_ => {
return Err(EvalError::TypeError(
"write_file requires content string".to_string(),
));
}
};
std::fs::write(&path, content)?;
Ok(Value::Bool(true))
@ -31,7 +35,11 @@ pub fn copy_file(args: &[Value]) -> Result<Value, EvalError> {
let dst = match args.get(1) {
Some(Value::Path(p)) => p.clone(),
Some(Value::Str(s)) => expand_path(s),
_ => return Err(EvalError::TypeError("copy_file requires destination path".to_string())),
_ => {
return Err(EvalError::TypeError(
"copy_file requires destination path".to_string(),
));
}
};
std::fs::copy(&src, &dst)?;
Ok(Value::Bool(true))
@ -71,7 +79,11 @@ pub fn list_dir(args: &[Value]) -> Result<Value, EvalError> {
pub fn glob_files(args: &[Value]) -> Result<Value, EvalError> {
let pattern = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("glob expects a pattern string".to_string())),
_ => {
return Err(EvalError::TypeError(
"glob expects a pattern string".to_string(),
));
}
};
let entries: Vec<Value> = glob::glob(pattern)
@ -127,7 +139,11 @@ pub fn path_join(args: &[Value]) -> Result<Value, EvalError> {
match arg {
Value::Path(p) => result.push(p),
Value::Str(s) => result.push(s),
_ => return Err(EvalError::TypeError("path_join expects paths or strings".to_string())),
_ => {
return Err(EvalError::TypeError(
"path_join expects paths or strings".to_string(),
));
}
}
}
Ok(Value::Path(result))
@ -135,7 +151,9 @@ pub fn path_join(args: &[Value]) -> Result<Value, EvalError> {
pub fn path_parent(args: &[Value]) -> Result<Value, EvalError> {
let path = get_path(args)?;
Ok(Value::Path(path.parent().map(|p| p.to_path_buf()).unwrap_or_default()))
Ok(Value::Path(
path.parent().map(|p| p.to_path_buf()).unwrap_or_default(),
))
}
pub fn path_filename(args: &[Value]) -> Result<Value, EvalError> {
@ -167,7 +185,11 @@ pub fn config_dir() -> Result<Value, EvalError> {
pub fn config_path(args: &[Value]) -> Result<Value, EvalError> {
let app = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("config_path expects an app name string".to_string())),
_ => {
return Err(EvalError::TypeError(
"config_path expects an app name string".to_string(),
));
}
};
let config = dirs::config_dir().unwrap_or_default();
Ok(Value::Path(config.join(app)))
@ -184,27 +206,31 @@ pub fn cache_dir() -> Result<Value, EvalError> {
pub fn exec(args: &[Value]) -> Result<Value, EvalError> {
let cmd = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("exec expects a command string".to_string())),
_ => {
return Err(EvalError::TypeError(
"exec expects a command string".to_string(),
));
}
};
let output = Command::new("sh")
.arg("-c")
.arg(cmd)
.output()?;
let output = Command::new("sh").arg("-c").arg(cmd).output()?;
Ok(Value::Str(String::from_utf8_lossy(&output.stdout).to_string()))
Ok(Value::Str(
String::from_utf8_lossy(&output.stdout).to_string(),
))
}
pub fn exec_with_status(args: &[Value]) -> Result<Value, EvalError> {
let cmd = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("exec_with_status expects a command string".to_string())),
_ => {
return Err(EvalError::TypeError(
"exec_with_status expects a command string".to_string(),
));
}
};
let status = Command::new("sh")
.arg("-c")
.arg(cmd)
.status()?;
let status = Command::new("sh").arg("-c").arg(cmd).status()?;
Ok(Value::Int(status.code().unwrap_or(-1) as i64))
}
@ -216,12 +242,14 @@ pub fn shell(args: &[Value]) -> Result<Value, EvalError> {
pub fn which(args: &[Value]) -> Result<Value, EvalError> {
let cmd = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("which expects a command name".to_string())),
_ => {
return Err(EvalError::TypeError(
"which expects a command name".to_string(),
));
}
};
let output = Command::new("which")
.arg(cmd)
.output()?;
let output = Command::new("which").arg(cmd).output()?;
if output.status.success() {
let path = String::from_utf8_lossy(&output.stdout).trim().to_string();
@ -240,7 +268,11 @@ pub fn to_json(args: &[Value]) -> Result<Value, EvalError> {
pub fn from_json(args: &[Value]) -> Result<Value, EvalError> {
let s = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("from_json expects a string".to_string())),
_ => {
return Err(EvalError::TypeError(
"from_json expects a string".to_string(),
));
}
};
let json: serde_json::Value = serde_json::from_str(s)
@ -260,11 +292,15 @@ pub fn to_toml(args: &[Value]) -> Result<Value, EvalError> {
pub fn from_toml(args: &[Value]) -> Result<Value, EvalError> {
let s = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("from_toml expects a string".to_string())),
_ => {
return Err(EvalError::TypeError(
"from_toml expects a string".to_string(),
));
}
};
let toml_val: toml::Value = toml::from_str(s)
.map_err(|e| EvalError::TypeError(format!("invalid TOML: {}", e)))?;
let toml_val: toml::Value =
toml::from_str(s).map_err(|e| EvalError::TypeError(format!("invalid TOML: {}", e)))?;
Ok(toml_to_value(&toml_val))
}
@ -272,7 +308,9 @@ pub fn from_toml(args: &[Value]) -> Result<Value, EvalError> {
pub fn to_yaml(args: &[Value]) -> Result<Value, EvalError> {
let val = args.first().unwrap_or(&Value::None);
let json = value_to_json(val);
Ok(Value::Str(serde_json::to_string_pretty(&json).unwrap_or_default()))
Ok(Value::Str(
serde_json::to_string_pretty(&json).unwrap_or_default(),
))
}
pub fn from_yaml(args: &[Value]) -> Result<Value, EvalError> {
@ -288,9 +326,9 @@ fn get_path(args: &[Value]) -> Result<PathBuf, EvalError> {
}
fn expand_path(s: &str) -> PathBuf {
if s.starts_with('~') {
if let Some(stripped) = s.strip_prefix('~') {
let home = dirs::home_dir().unwrap_or_default();
home.join(s.strip_prefix("~/").unwrap_or(&s[1..]))
home.join(stripped.strip_prefix('/').unwrap_or(stripped))
} else {
PathBuf::from(s)
}
@ -314,9 +352,7 @@ fn value_to_json(val: &Value) -> serde_json::Value {
Value::Str(s) => serde_json::Value::String(s.clone()),
Value::Bool(b) => serde_json::Value::Bool(*b),
Value::Path(p) => serde_json::Value::String(p.display().to_string()),
Value::List(items) => {
serde_json::Value::Array(items.iter().map(value_to_json).collect())
}
Value::List(items) => serde_json::Value::Array(items.iter().map(value_to_json).collect()),
Value::Struct(_, fields) => {
let map: serde_json::Map<String, serde_json::Value> = fields
.iter()
@ -343,9 +379,7 @@ fn json_to_value(json: &serde_json::Value) -> Value {
}
}
serde_json::Value::String(s) => Value::Str(s.clone()),
serde_json::Value::Array(arr) => {
Value::List(arr.iter().map(json_to_value).collect())
}
serde_json::Value::Array(arr) => Value::List(arr.iter().map(json_to_value).collect()),
serde_json::Value::Object(obj) => {
let fields: indexmap::IndexMap<String, Value> = obj
.iter()
@ -363,9 +397,7 @@ fn value_to_toml(val: &Value) -> toml::Value {
Value::Str(s) => toml::Value::String(s.clone()),
Value::Bool(b) => toml::Value::Boolean(*b),
Value::Path(p) => toml::Value::String(p.display().to_string()),
Value::List(items) => {
toml::Value::Array(items.iter().map(value_to_toml).collect())
}
Value::List(items) => toml::Value::Array(items.iter().map(value_to_toml).collect()),
Value::Struct(_, fields) => {
let map: toml::map::Map<String, toml::Value> = fields
.iter()
@ -383,9 +415,7 @@ fn toml_to_value(toml: &toml::Value) -> Value {
toml::Value::Integer(i) => Value::Int(*i),
toml::Value::Float(f) => Value::Float(*f),
toml::Value::String(s) => Value::Str(s.clone()),
toml::Value::Array(arr) => {
Value::List(arr.iter().map(toml_to_value).collect())
}
toml::Value::Array(arr) => Value::List(arr.iter().map(toml_to_value).collect()),
toml::Value::Table(table) => {
let fields: indexmap::IndexMap<String, Value> = table
.iter()

View file

@ -146,22 +146,31 @@ pub fn call_method(
}
}
"map" => {
let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>();
let all_args = std::iter::once(obj.clone())
.chain(args.iter().cloned())
.collect::<Vec<_>>();
collections::map(eval, &all_args, arg_exprs)
}
"filter" => {
let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>();
let all_args = std::iter::once(obj.clone())
.chain(args.iter().cloned())
.collect::<Vec<_>>();
collections::filter(eval, &all_args, arg_exprs)
}
"fold" => {
let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>();
let all_args = std::iter::once(obj.clone())
.chain(args.iter().cloned())
.collect::<Vec<_>>();
collections::fold(eval, &all_args, arg_exprs)
}
"join" => {
let sep = args.first().map(|v| match v {
let sep = args
.first()
.map(|v| match v {
Value::Str(s) => s.as_str(),
_ => "",
}).unwrap_or("");
})
.unwrap_or("");
let result = items
.iter()
.map(|v| v.to_string_repr())
@ -170,7 +179,9 @@ pub fn call_method(
Ok(Value::Str(result))
}
"sort" => {
let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>();
let all_args = std::iter::once(obj.clone())
.chain(args.iter().cloned())
.collect::<Vec<_>>();
collections::sort(&all_args)
}
"reverse" => {
@ -179,7 +190,9 @@ pub fn call_method(
Ok(Value::List(reversed))
}
"unique" => {
let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>();
let all_args = std::iter::once(obj.clone())
.chain(args.iter().cloned())
.collect::<Vec<_>>();
collections::unique(&all_args)
}
_ => Err(EvalError::UndefinedFunction(format!("list.{}", method))),
@ -191,19 +204,21 @@ pub fn call_method(
"lower" => Ok(Value::Str(s.to_lowercase())),
"trim" => Ok(Value::Str(s.trim().to_string())),
"split" => {
let sep = args.first().map(|v| match v {
let sep = args
.first()
.map(|v| match v {
Value::Str(s) => s.as_str(),
_ => " ",
}).unwrap_or(" ");
})
.unwrap_or(" ");
let parts: Vec<Value> = s.split(sep).map(|p| Value::Str(p.to_string())).collect();
Ok(Value::List(parts))
}
"replace" => {
if args.len() >= 2 {
if let (Value::Str(from), Value::Str(to)) = (&args[0], &args[1]) {
if args.len() >= 2
&& let (Value::Str(from), Value::Str(to)) = (&args[0], &args[1]) {
return Ok(Value::Str(s.replace(from, to)));
}
}
Ok(Value::Str(s.clone()))
}
"starts_with" => {
@ -231,9 +246,19 @@ pub fn call_method(
},
Value::Path(p) => match method {
"parent" => Ok(Value::Path(p.parent().map(|p| p.to_path_buf()).unwrap_or_default())),
"filename" => Ok(Value::Str(p.file_name().map(|s| s.to_string_lossy().to_string()).unwrap_or_default())),
"extension" => Ok(Value::Str(p.extension().map(|s| s.to_string_lossy().to_string()).unwrap_or_default())),
"parent" => Ok(Value::Path(
p.parent().map(|p| p.to_path_buf()).unwrap_or_default(),
)),
"filename" => Ok(Value::Str(
p.file_name()
.map(|s| s.to_string_lossy().to_string())
.unwrap_or_default(),
)),
"extension" => Ok(Value::Str(
p.extension()
.map(|s| s.to_string_lossy().to_string())
.unwrap_or_default(),
)),
"exists" => Ok(Value::Bool(p.exists())),
"is_file" => Ok(Value::Bool(p.is_file())),
"is_dir" => Ok(Value::Bool(p.is_dir())),
@ -256,15 +281,14 @@ pub fn call_method(
let mut method_args = vec![obj.clone()];
method_args.extend(args.iter().cloned());
let env_clone = eval.env().clone();
return eval.call_function(&m, &env_clone, &method_args);
return eval.call_function(m, &env_clone, &method_args);
}
}
}
if let Some(field) = fields.get(method) {
if let Value::Function(func, env) = field {
if let Some(field) = fields.get(method)
&& let Value::Function(func, env) = field {
return eval.call_function(func, env, args);
}
}
Err(EvalError::FieldNotFound {
ty: name.clone(),
field: method.to_string(),
@ -295,7 +319,9 @@ fn options_unwrap(args: &[Value]) -> Result<Value, EvalError> {
match args.first() {
Some(Value::None) => Err(EvalError::TypeError("unwrap called on none".to_string())),
Some(v) => Ok(v.clone()),
None => Err(EvalError::TypeError("unwrap requires an argument".to_string())),
None => Err(EvalError::TypeError(
"unwrap requires an argument".to_string(),
)),
}
}
@ -308,18 +334,22 @@ fn options_unwrap_or(args: &[Value]) -> Result<Value, EvalError> {
}
fn options_is_some(args: &[Value]) -> Result<Value, EvalError> {
Ok(Value::Bool(!matches!(args.first(), Some(Value::None) | None)))
Ok(Value::Bool(!matches!(
args.first(),
Some(Value::None) | None
)))
}
fn options_is_none(args: &[Value]) -> Result<Value, EvalError> {
Ok(Value::Bool(matches!(args.first(), Some(Value::None) | None)))
Ok(Value::Bool(matches!(
args.first(),
Some(Value::None) | None
)))
}
fn env_get(args: &[Value]) -> Result<Value, EvalError> {
if let Some(Value::Str(key)) = args.first() {
Ok(std::env::var(key)
.map(Value::Str)
.unwrap_or(Value::None))
Ok(std::env::var(key).map(Value::Str).unwrap_or(Value::None))
} else {
Ok(Value::None)
}

View file

@ -64,12 +64,20 @@ pub fn replace(args: &[Value]) -> Result<Value, EvalError> {
let from = match args.get(1) {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("replace requires from string".to_string())),
_ => {
return Err(EvalError::TypeError(
"replace requires from string".to_string(),
));
}
};
let to = match args.get(2) {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("replace requires to string".to_string())),
_ => {
return Err(EvalError::TypeError(
"replace requires to string".to_string(),
));
}
};
Ok(Value::Str(s.replace(from.as_str(), to.as_str())))
@ -78,12 +86,20 @@ pub fn replace(args: &[Value]) -> Result<Value, EvalError> {
pub fn starts_with(args: &[Value]) -> Result<Value, EvalError> {
let s = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("starts_with expects a string".to_string())),
_ => {
return Err(EvalError::TypeError(
"starts_with expects a string".to_string(),
));
}
};
let prefix = match args.get(1) {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("starts_with requires prefix".to_string())),
_ => {
return Err(EvalError::TypeError(
"starts_with requires prefix".to_string(),
));
}
};
Ok(Value::Bool(s.starts_with(prefix.as_str())))
@ -92,12 +108,20 @@ pub fn starts_with(args: &[Value]) -> Result<Value, EvalError> {
pub fn ends_with(args: &[Value]) -> Result<Value, EvalError> {
let s = match args.first() {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("ends_with expects a string".to_string())),
_ => {
return Err(EvalError::TypeError(
"ends_with expects a string".to_string(),
));
}
};
let suffix = match args.get(1) {
Some(Value::Str(s)) => s,
_ => return Err(EvalError::TypeError("ends_with requires suffix".to_string())),
_ => {
return Err(EvalError::TypeError(
"ends_with requires suffix".to_string(),
));
}
};
Ok(Value::Bool(s.ends_with(suffix.as_str())))
@ -106,7 +130,11 @@ pub fn ends_with(args: &[Value]) -> Result<Value, EvalError> {
pub fn format(args: &[Value]) -> Result<Value, EvalError> {
let template = match args.first() {
Some(Value::Str(s)) => s.clone(),
_ => return Err(EvalError::TypeError("format expects a template string".to_string())),
_ => {
return Err(EvalError::TypeError(
"format expects a template string".to_string(),
));
}
};
let mut result = template;

View file

@ -128,7 +128,11 @@ impl Value {
Value::Path(p) => p.display().to_string(),
Value::List(items) => {
// Join list items with colon (PATH-style)
items.iter().map(|v| v.to_env_string()).collect::<Vec<_>>().join(":")
items
.iter()
.map(|v| v.to_env_string())
.collect::<Vec<_>>()
.join(":")
}
Value::None => String::new(),
_ => self.to_string_repr(),
@ -347,9 +351,18 @@ impl Evaluator {
EnumDecl {
name: "Os".to_string(),
variants: vec![
EnumVariant { name: "Linux".to_string(), fields: None },
EnumVariant { name: "MacOS".to_string(), fields: None },
EnumVariant { name: "Windows".to_string(), fields: None },
EnumVariant {
name: "Linux".to_string(),
fields: None,
},
EnumVariant {
name: "MacOS".to_string(),
fields: None,
},
EnumVariant {
name: "Windows".to_string(),
fields: None,
},
],
},
);
@ -365,7 +378,10 @@ impl Evaluator {
};
env.define("os".to_string(), os_val);
env.define("distro".to_string(), Value::Str(sys.distro.clone()));
env.define("pkg_manager".to_string(), Value::Str(sys.pkg_manager.clone()));
env.define(
"pkg_manager".to_string(),
Value::Str(sys.pkg_manager.clone()),
);
env.define("hostname".to_string(), Value::Str(sys.hostname.clone()));
env.define("arch".to_string(), Value::Str(sys.arch.to_string()));
}
@ -383,13 +399,17 @@ impl Evaluator {
let mut vars = self.env.get_all_variables();
// Add doot global variables
vars.insert("DOOT_HOME".to_string(), Self::home_dir().display().to_string());
vars.insert("DOOT_CONFIG_DIR".to_string(),
vars.insert(
"DOOT_HOME".to_string(),
Self::home_dir().display().to_string(),
);
vars.insert(
"DOOT_CONFIG_DIR".to_string(),
dirs::config_dir()
.unwrap_or_else(|| Self::home_dir().join(".config"))
.join("doot")
.display()
.to_string()
.to_string(),
);
vars.insert("DOOT_OS".to_string(), std::env::consts::OS.to_string());
vars.insert("DOOT_ARCH".to_string(), std::env::consts::ARCH.to_string());
@ -403,11 +423,10 @@ impl Evaluator {
let value = self.eval_expr(&decl.value)?;
// Handle special config variables
if decl.name == "sandbox" {
if let Value::Bool(b) = &value {
if decl.name == "sandbox"
&& let Value::Bool(b) = &value {
self.result.sandbox = *b;
}
}
self.env.define(decl.name.clone(), value);
Ok(None)
@ -718,12 +737,10 @@ impl Evaluator {
Value::Lambda(params, body, lambda_env) => {
self.call_lambda(&params, &body, &lambda_env, &arg_vals)
}
_ => {
Err(EvalError::TypeError(format!(
_ => Err(EvalError::TypeError(format!(
"cannot call {}",
callee_val.type_name()
)))
}
))),
}
}
@ -741,7 +758,10 @@ impl Evaluator {
let obj_val = self.eval_expr(obj)?;
match obj_val {
Value::Struct(name, fields) => {
fields.get(field).cloned().ok_or_else(|| EvalError::FieldNotFound {
fields
.get(field)
.cloned()
.ok_or_else(|| EvalError::FieldNotFound {
ty: name,
field: field.clone(),
})
@ -759,22 +779,17 @@ impl Evaluator {
match (obj_val, idx_val) {
(Value::List(items), Value::Int(i)) => {
let index = if i < 0 {
items.len() as i64 + i
} else {
i
};
items.get(index as usize).cloned().ok_or(EvalError::IndexOutOfBounds {
let index = if i < 0 { items.len() as i64 + i } else { i };
items
.get(index as usize)
.cloned()
.ok_or(EvalError::IndexOutOfBounds {
index: i,
len: items.len(),
})
}
(Value::Str(s), Value::Int(i)) => {
let index = if i < 0 {
s.len() as i64 + i
} else {
i
};
let index = if i < 0 { s.len() as i64 + i } else { i };
s.chars()
.nth(index as usize)
.map(|c| Value::Str(c.to_string()))
@ -815,9 +830,7 @@ impl Evaluator {
Ok(Value::Struct(name.clone(), values))
}
Expr::EnumVariant(ty, variant) => {
Ok(Value::Enum(ty.clone(), variant.clone()))
}
Expr::EnumVariant(ty, variant) => Ok(Value::Enum(ty.clone(), variant.clone())),
Expr::If(cond, then_expr, else_expr) => {
let cond_val = self.eval_expr(cond)?;
@ -830,13 +843,13 @@ impl Evaluator {
}
}
Expr::Lambda(params, body, ..) => {
Ok(Value::Lambda(params.clone(), *body.clone(), self.env.clone()))
}
Expr::Lambda(params, body, ..) => Ok(Value::Lambda(
params.clone(),
*body.clone(),
self.env.clone(),
)),
Expr::Await(expr) => {
self.eval_expr(expr)
}
Expr::Await(expr) => self.eval_expr(expr),
Expr::Path(left, right) => {
let left_path = self.eval_to_path(left)?;
@ -1126,9 +1139,9 @@ impl Evaluator {
match val {
Value::Path(p) => Ok(p),
Value::Str(s) => {
if s.starts_with('~') {
if let Some(stripped) = s.strip_prefix('~') {
let home = Self::home_dir();
Ok(home.join(s.strip_prefix("~/").unwrap_or(&s[1..])))
Ok(home.join(stripped.strip_prefix('/').unwrap_or(stripped)))
} else {
Ok(PathBuf::from(s))
}
@ -1208,7 +1221,9 @@ fn command_exists(cmd: &str) -> bool {
} else {
// Fallback to hardcoded paths
let paths = ["/usr/bin/", "/usr/local/bin/", "/bin/"];
paths.iter().any(|p| std::path::Path::new(&format!("{}{}", p, cmd)).exists())
paths
.iter()
.any(|p| std::path::Path::new(&format!("{}{}", p, cmd)).exists())
};
cache.insert(cmd.to_string(), exists);

View file

@ -198,17 +198,16 @@ impl Lexer {
.ignore_then(text::digits(16))
.map(|s: String| Token::Int(i64::from_str_radix(&s, 16).unwrap_or(0)));
let decimal = text::int(10)
.map(|s: String| Token::Int(s.parse().unwrap()));
let decimal = text::int(10).map(|s: String| Token::Int(s.parse().unwrap()));
let int = octal.or(hex).or(decimal);
let float = text::int(10)
.then(just('.').then(text::digits(10)))
.map(|(a, (_, b)): (String, (char, String))| {
let float = text::int(10).then(just('.').then(text::digits(10))).map(
|(a, (_, b)): (String, (char, String))| {
let f: f64 = format!("{}.{}", a, b).parse().unwrap();
Token::Float(OrderedFloat(f))
});
},
);
let escape = just('\\').ignore_then(
just('\\')
@ -226,7 +225,8 @@ impl Lexer {
.map(Token::Str);
// Heredoc: >>>...<<<
let heredoc = just(">>>")
let heredoc =
just(">>>")
.ignore_then(take_until(just("<<<")))
.map(|(chars, _): (Vec<char>, _)| {
let s: String = chars.into_iter().collect();
@ -307,9 +307,7 @@ impl Lexer {
just('#').to(Token::Hash),
));
let comment = just('#')
.then(none_of("\n").repeated())
.ignored();
let comment = just('#').then(none_of("\n").repeated()).ignored();
let whitespace = just(' ').or(just('\t')).repeated().at_least(1).ignored();
@ -361,9 +359,14 @@ impl Lexer {
if current_indent > last_indent {
indent_stack.push(current_indent);
result.push(Spanned::new(Token::Indent(current_indent), span_start..span_start));
result.push(Spanned::new(
Token::Indent(current_indent),
span_start..span_start,
));
} else {
while indent_stack.len() > 1 && current_indent < *indent_stack.last().unwrap() {
while indent_stack.len() > 1
&& current_indent < *indent_stack.last().unwrap()
{
indent_stack.pop();
result.push(Spanned::new(Token::Dedent, span_start..span_start));
}

View file

@ -17,6 +17,6 @@ pub use ast::*;
pub use evaluator::Evaluator;
pub use lexer::Lexer;
pub use parser::Parser;
pub use planner::{validate_dotfile_targets, DotfileConflict, DotfileValidation, DotfileWarning};
pub use planner::{DotfileConflict, DotfileValidation, DotfileWarning, validate_dotfile_targets};
pub use type_checker::TypeChecker;
pub use types::Type;

View file

@ -33,17 +33,18 @@ impl MacroExpander {
let expanded: Vec<Spanned<Statement>> = decl
.body
.iter()
.map(|stmt| Spanned::new(self.substitute_statement(&stmt.node, &substitutions), stmt.span.clone()))
.map(|stmt| {
Spanned::new(
self.substitute_statement(&stmt.node, &substitutions),
stmt.span.clone(),
)
})
.collect();
Some(expanded)
}
fn substitute_statement(
&self,
stmt: &Statement,
subs: &HashMap<String, &Expr>,
) -> Statement {
fn substitute_statement(&self, stmt: &Statement, subs: &HashMap<String, &Expr>) -> Statement {
match stmt {
Statement::VarDecl(decl) => Statement::VarDecl(VarDecl {
name: decl.name.clone(),
@ -63,7 +64,7 @@ impl MacroExpander {
copy_patterns: dotfile.copy_patterns.clone(),
}),
Statement::Package(pkg) => Statement::Package(Package {
Statement::Package(pkg) => Statement::Package(Box::new(Package {
default: pkg.default.as_ref().map(|e| self.substitute_expr(e, subs)),
brew: pkg.brew.as_ref().map(|s| PackageSpec {
name: self.substitute_expr(&s.name, subs),
@ -86,7 +87,7 @@ impl MacroExpander {
tap: s.tap.clone(),
}),
when: pkg.when.as_ref().map(|e| self.substitute_expr(e, subs)),
}),
})),
Statement::ForLoop(for_loop) => Statement::ForLoop(ForLoop {
var: for_loop.var.clone(),
@ -94,9 +95,7 @@ impl MacroExpander {
body: for_loop
.body
.iter()
.map(|s| {
Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone())
})
.map(|s| Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone()))
.collect(),
}),
@ -105,9 +104,7 @@ impl MacroExpander {
then_body: if_stmt
.then_body
.iter()
.map(|s| {
Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone())
})
.map(|s| Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone()))
.collect(),
else_body: if_stmt.else_body.as_ref().map(|body| {
body.iter()
@ -164,9 +161,12 @@ impl MacroExpander {
Box::new(self.substitute_expr(idx, subs)),
),
Expr::List(items) => {
Expr::List(items.iter().map(|i| self.substitute_expr(i, subs)).collect())
}
Expr::List(items) => Expr::List(
items
.iter()
.map(|i| self.substitute_expr(i, subs))
.collect(),
),
Expr::StructInit(name, fields) => Expr::StructInit(
name.clone(),
@ -184,23 +184,18 @@ impl MacroExpander {
.map(|e| Box::new(self.substitute_expr(e, subs))),
),
Expr::Lambda(params, body) => Expr::Lambda(
params.clone(),
Box::new(self.substitute_expr(body, subs)),
),
Expr::Await(inner) => {
Expr::Await(Box::new(self.substitute_expr(inner, subs)))
Expr::Lambda(params, body) => {
Expr::Lambda(params.clone(), Box::new(self.substitute_expr(body, subs)))
}
Expr::Await(inner) => Expr::Await(Box::new(self.substitute_expr(inner, subs))),
Expr::Path(left, right) => Expr::Path(
Box::new(self.substitute_expr(left, subs)),
Box::new(self.substitute_expr(right, subs)),
),
Expr::HomePath(path) => {
Expr::HomePath(Box::new(self.substitute_expr(path, subs)))
}
Expr::HomePath(path) => Expr::HomePath(Box::new(self.substitute_expr(path, subs))),
Expr::Interpolated(parts) => Expr::Interpolated(
parts

View file

@ -2,8 +2,8 @@
use crate::ast::*;
use crate::lexer::Token;
use chumsky::prelude::*;
use chumsky::Parser as _;
use chumsky::prelude::*;
use std::collections::HashMap;
/// Parses tokens into an AST.
@ -33,10 +33,7 @@ impl Parser {
fn statement_parser() -> impl chumsky::Parser<Token, Spanned<Statement>, Error = Simple<Token>>
{
recursive(|stmt| {
let whitespace = choice((
just(Token::Newline),
just(Token::Dedent),
)).repeated();
let whitespace = just(Token::Newline).repeated();
let var_decl = Self::var_decl_parser().map(Statement::VarDecl);
let fn_decl = Self::fn_decl_parser(stmt.clone()).map(Statement::FnDecl);
@ -45,7 +42,7 @@ impl Parser {
let type_alias = Self::type_alias_parser().map(Statement::TypeAlias);
let import = Self::import_parser().map(Statement::Import);
let dotfile = Self::dotfile_parser().map(Statement::Dotfile);
let package = Self::package_parser().map(Statement::Package);
let package = Self::package_parser().map(|p| Statement::Package(Box::new(p)));
let secret = Self::secret_parser().map(Statement::Secret);
let hook = Self::hook_parser().map(Statement::Hook);
let simple_hook = Self::simple_hook_parser().map(Statement::Hook);
@ -127,11 +124,7 @@ impl Parser {
let param = Self::ident_parser()
.then_ignore(just(Token::Colon))
.then(Self::type_annotation_parser())
.then(
just(Token::Eq)
.ignore_then(Self::expr_parser())
.or_not(),
)
.then(just(Token::Eq).ignore_then(Self::expr_parser()).or_not())
.map(|((name, ty), default)| FnParam { name, ty, default });
param
@ -146,11 +139,7 @@ impl Parser {
let field = Self::ident_parser()
.then_ignore(just(Token::Colon))
.then(Self::type_annotation_parser())
.then(
just(Token::Eq)
.ignore_then(Self::expr_parser())
.or_not(),
)
.then(just(Token::Eq).ignore_then(Self::expr_parser()).or_not())
.map(|((name, ty), default)| StructField { name, ty, default });
let method = Self::fn_decl_parser(stmt);
@ -161,10 +150,7 @@ impl Parser {
.then_ignore(just(Token::Newline).repeated())
.then_ignore(just(Token::Indent(0)).rewind().or_not())
.then(
choice((
field.map(Either::Left),
method.map(Either::Right),
))
choice((field.map(Either::Left), method.map(Either::Right)))
.padded_by(just(Token::Newline).repeated())
.repeated(),
)
@ -178,7 +164,11 @@ impl Parser {
Either::Right(m) => methods.push(m),
}
}
StructDecl { name, fields, methods }
StructDecl {
name,
fields,
methods,
}
})
}
@ -339,10 +329,34 @@ impl Parser {
for (name, value) in fields {
match name.as_str() {
"default" => pkg.default = Some(value),
"brew" => pkg.brew = Some(PackageSpec { name: value, cask: None, tap: None }),
"apt" => pkg.apt = Some(PackageSpec { name: value, cask: None, tap: None }),
"pacman" => pkg.pacman = Some(PackageSpec { name: value, cask: None, tap: None }),
"yay" => pkg.yay = Some(PackageSpec { name: value, cask: None, tap: None }),
"brew" => {
pkg.brew = Some(PackageSpec {
name: value,
cask: None,
tap: None,
})
}
"apt" => {
pkg.apt = Some(PackageSpec {
name: value,
cask: None,
tap: None,
})
}
"pacman" => {
pkg.pacman = Some(PackageSpec {
name: value,
cask: None,
tap: None,
})
}
"yay" => {
pkg.yay = Some(PackageSpec {
name: value,
cask: None,
tap: None,
})
}
"when" => pkg.when = Some(value),
_ => {}
}
@ -563,8 +577,8 @@ impl Parser {
.then_ignore(just(Token::Dedent).or_not())
}
fn type_annotation_parser(
) -> impl chumsky::Parser<Token, TypeAnnotation, Error = Simple<Token>> {
fn type_annotation_parser() -> impl chumsky::Parser<Token, TypeAnnotation, Error = Simple<Token>>
{
recursive(|ty| {
let simple = Self::ident_parser().map(TypeAnnotation::Simple);
@ -590,11 +604,7 @@ impl Parser {
optional
.clone()
.then(
just(Token::Pipe)
.ignore_then(optional.clone())
.repeated(),
)
.then(just(Token::Pipe).ignore_then(optional.clone()).repeated())
.map(|(first, rest)| {
if rest.is_empty() {
first
@ -707,7 +717,8 @@ impl Parser {
paren,
));
let call_or_access = atom.then(
let call_or_access = atom
.then(
choice((
expr.clone()
.separated_by(just(Token::Comma))
@ -735,9 +746,12 @@ impl Parser {
.map(CallOrAccess::Index),
))
.repeated(),
).foldl(|e, access| match access {
)
.foldl(|e, access| match access {
CallOrAccess::Call(args) => Expr::Call(Box::new(e), args),
CallOrAccess::MethodCall(name, args) => Expr::MethodCall(Box::new(e), name, args),
CallOrAccess::MethodCall(name, args) => {
Expr::MethodCall(Box::new(e), name, args)
}
CallOrAccess::Field(name) => Expr::Field(Box::new(e), name),
CallOrAccess::Index(idx) => Expr::Index(Box::new(e), Box::new(idx)),
});
@ -752,7 +766,9 @@ impl Parser {
let unary = unary_ops
.then(call_or_access)
.map(|(ops, expr)| {
ops.into_iter().rev().fold(expr, |e, op| Expr::Unary(op, Box::new(e)))
ops.into_iter()
.rev()
.fold(expr, |e, op| Expr::Unary(op, Box::new(e)))
})
.boxed();
@ -819,7 +835,11 @@ impl Parser {
or_expr
.clone()
.then(just(Token::QuestionQuestion).ignore_then(or_expr.clone()).repeated())
.then(
just(Token::QuestionQuestion)
.ignore_then(or_expr.clone())
.repeated(),
)
.foldl(|a, b| Expr::Binary(Box::new(a), BinOp::NullCoalesce, Box::new(b)))
})
}
@ -867,11 +887,10 @@ impl Parser {
parts.push(InterpolatedPart::Literal(current));
}
if parts.len() == 1 {
if let InterpolatedPart::Literal(s) = &parts[0] {
if parts.len() == 1
&& let InterpolatedPart::Literal(s) = &parts[0] {
return Expr::Literal(Literal::Str(s.clone()));
}
}
Expr::Interpolated(parts)
}
@ -933,9 +952,7 @@ fn expr_to_permission_rules(expr: &Expr) -> Vec<PermissionRule> {
}
}
// Single mode in array (less common but supported)
Expr::Literal(Literal::Int(mode)) => {
Some(PermissionRule::Single(*mode as u32))
}
Expr::Literal(Literal::Int(mode)) => Some(PermissionRule::Single(*mode as u32)),
_ => None,
}
})

View file

@ -32,11 +32,7 @@ pub trait TaskHandler: Send + Sync {
fn handle_package(&self, name: &str, manager: &str) -> TaskResult;
/// Handles secret decryption.
fn handle_secret(
&self,
source: &std::path::Path,
target: &std::path::Path,
) -> TaskResult;
fn handle_secret(&self, source: &std::path::Path, target: &std::path::Path) -> TaskResult;
/// Handles hook execution.
fn handle_hook(&self, command: &str) -> TaskResult;
@ -90,8 +86,8 @@ impl<H: TaskHandler + 'static> Executor<H> {
/// Executes tasks in parallel batches.
pub fn execute_parallel(&self) -> Result<ExecutionReport, ExecutionError> {
let batches = self
.graph
let batches =
self.graph
.get_parallel_batches()
.map_err(|e| ExecutionError::TaskFailed {
task_id: "scheduler".to_string(),
@ -141,13 +137,9 @@ impl<H: TaskHandler + 'static> Executor<H> {
template,
} => self.handler.handle_dotfile(source, target, *template),
TaskData::Package { name, manager } => {
self.handler.handle_package(name, manager)
}
TaskData::Package { name, manager } => self.handler.handle_package(name, manager),
TaskData::Secret { source, target } => {
self.handler.handle_secret(source, target)
}
TaskData::Secret { source, target } => self.handler.handle_secret(source, target),
TaskData::Hook { command } => self.handler.handle_hook(command),

View file

@ -7,5 +7,5 @@ pub mod scheduler;
pub use dag::DependencyGraph;
pub use executor::Executor;
pub use scheduler::{
validate_dotfile_targets, DotfileConflict, DotfileValidation, DotfileWarning, Scheduler,
DotfileConflict, DotfileValidation, DotfileWarning, Scheduler, validate_dotfile_targets,
};

View file

@ -99,10 +99,7 @@ impl Default for Scheduler {
#[derive(Debug, Clone)]
pub enum DotfileConflict {
/// Same source and target (duplicate entry).
Duplicate {
index_a: usize,
index_b: usize,
},
Duplicate { index_a: usize, index_b: usize },
/// Overlapping directories with no distinguishing settings (likely redundant).
RedundantOverlap {
parent_index: usize,
@ -323,7 +320,8 @@ mod tests {
std::fs::create_dir_all(temp.path().join("config/nvim")).unwrap();
std::fs::write(temp.path().join("config/nvim/init.lua"), "").unwrap();
let mut file_dotfile = make_dotfile("config/nvim/init.lua", "/home/user/.config/nvim/init.lua");
let mut file_dotfile =
make_dotfile("config/nvim/init.lua", "/home/user/.config/nvim/init.lua");
file_dotfile.template = true;
let dotfiles = vec![
@ -377,6 +375,10 @@ mod tests {
assert!(result.errors.is_empty());
assert_eq!(result.warnings.len(), 1);
assert!(result.warnings[0].message.contains("overlapping directories"));
assert!(
result.warnings[0]
.message
.contains("overlapping directories")
);
}
}

View file

@ -50,18 +50,26 @@ impl TypeError {
TypeError::UndefinedType(name, span) => {
(format!("undefined type: {}", name), span.clone())
}
TypeError::TypeMismatch { expected, got, span } => {
(format!("expected {}, got {}", expected, got), span.clone())
}
TypeError::NotCallable(ty, span) => {
(format!("cannot call non-function type: {}", ty), span.clone())
}
TypeError::TypeMismatch {
expected,
got,
span,
} => (format!("expected {}, got {}", expected, got), span.clone()),
TypeError::NotCallable(ty, span) => (
format!("cannot call non-function type: {}", ty),
span.clone(),
),
TypeError::FieldNotFound { ty, field, span } => {
(format!("field {} not found on {}", field, ty), span.clone())
}
TypeError::WrongArity { expected, got, span } => {
(format!("expected {} arguments, got {}", expected, got), span.clone())
}
TypeError::WrongArity {
expected,
got,
span,
} => (
format!("expected {} arguments, got {}", expected, got),
span.clone(),
),
};
Report::build(ReportKind::Error, filename, span.start)
@ -318,7 +326,8 @@ impl TypeChecker {
Box::new(ft.return_type.clone()),
)
} else {
self.errors.push(TypeError::UndefinedVariable(name.clone(), span.clone()));
self.errors
.push(TypeError::UndefinedVariable(name.clone(), span.clone()));
Type::Unknown
}
}
@ -336,7 +345,8 @@ impl TypeChecker {
Type::Int
}
} else if matches!(op, BinOp::Add)
&& (left_ty.is_compatible(&Type::Str) || right_ty.is_compatible(&Type::Str))
&& (left_ty.is_compatible(&Type::Str)
|| right_ty.is_compatible(&Type::Str))
{
Type::Str
} else {
@ -349,9 +359,12 @@ impl TypeChecker {
}
}
BinOp::Eq | BinOp::NotEq | BinOp::Lt | BinOp::Gt | BinOp::LtEq | BinOp::GtEq => {
Type::Bool
}
BinOp::Eq
| BinOp::NotEq
| BinOp::Lt
| BinOp::Gt
| BinOp::LtEq
| BinOp::GtEq => Type::Bool,
BinOp::And | BinOp::Or => {
if !left_ty.is_compatible(&Type::Bool) {
@ -446,7 +459,8 @@ impl TypeChecker {
}
Type::Unknown | Type::Any => Type::Any,
_ => {
self.errors.push(TypeError::NotCallable(callee_ty.display(), span.clone()));
self.errors
.push(TypeError::NotCallable(callee_ty.display(), span.clone()));
Type::Unknown
}
}
@ -545,7 +559,8 @@ impl TypeChecker {
if let Some(et) = self.env.enums.get(enum_name) {
Type::Enum(et.clone())
} else {
self.errors.push(TypeError::UndefinedType(enum_name.clone(), span.clone()));
self.errors
.push(TypeError::UndefinedType(enum_name.clone(), span.clone()));
Type::Unknown
}
}
@ -572,7 +587,8 @@ impl TypeChecker {
}
Type::Struct(st)
} else {
self.errors.push(TypeError::UndefinedType(struct_name.clone(), span.clone()));
self.errors
.push(TypeError::UndefinedType(struct_name.clone(), span.clone()));
Type::Unknown
}
}
@ -615,8 +631,8 @@ impl TypeChecker {
}
Expr::Await(expr) => {
let ty = self.infer_expr(expr, span);
ty
self.infer_expr(expr, span)
}
Expr::Path(left, right) => {
@ -646,7 +662,7 @@ impl TypeChecker {
) -> Type {
match name {
"map" | "filter" => {
if args.len() >= 1 {
if !args.is_empty() {
let list_ty = self.infer_expr(&args[0], span);
if let Type::List(inner) = list_ty {
if name == "filter" {
@ -660,7 +676,7 @@ impl TypeChecker {
"fold" => Type::Any,
"len" => Type::Int,
"first" | "last" => {
if args.len() >= 1 {
if !args.is_empty() {
let list_ty = self.infer_expr(&args[0], span);
if let Type::List(inner) = list_ty {
return Type::Optional(inner);
@ -675,8 +691,11 @@ impl TypeChecker {
"read_file" | "read_file_lines" => Type::Str,
"file_exists" | "dir_exists" | "is_symlink" => Type::Bool,
"list_dir" | "glob" | "walk_dir" => Type::List(Box::new(Type::Path)),
"home" | "config_dir" | "config_path" | "data_dir" | "cache_dir" | "temp_dir" | "temp_file" => Type::Path,
"path_join" | "path_parent" | "path_filename" | "path_extension" | "read_link" => Type::Path,
"home" | "config_dir" | "config_path" | "data_dir" | "cache_dir" | "temp_dir"
| "temp_file" => Type::Path,
"path_join" | "path_parent" | "path_filename" | "path_extension" | "read_link" => {
Type::Path
}
"fetch" | "fetch_json" | "fetch_bytes" | "post" | "post_json" => Type::Any,
"download" => Type::Bool,
"exec" | "shell" => Type::Str,
@ -688,7 +707,7 @@ impl TypeChecker {
"encrypt_age" | "decrypt_age" => Type::Str,
"env" => Type::Optional(Box::new(Type::Str)),
"unwrap" => {
if args.len() >= 1 {
if !args.is_empty() {
let opt_ty = self.infer_expr(&args[0], span);
if let Type::Optional(inner) = opt_ty {
return *inner;
@ -706,14 +725,14 @@ impl TypeChecker {
"is_some" | "is_none" => Type::Bool,
"all" | "race" => Type::Any,
"seq" | "batch" => {
if args.len() >= 1 {
if !args.is_empty() {
self.infer_expr(&args[0], span)
} else {
Type::Any
}
}
"flatten" | "concat" | "unique" | "sort" | "reverse" => {
if args.len() >= 1 {
if !args.is_empty() {
self.infer_expr(&args[0], span)
} else {
Type::List(Box::new(Type::Any))
@ -721,7 +740,7 @@ impl TypeChecker {
}
"zip" | "enumerate" => Type::List(Box::new(Type::Any)),
"sort_by" => {
if args.len() >= 1 {
if !args.is_empty() {
self.infer_expr(&args[0], span)
} else {
Type::List(Box::new(Type::Any))

View file

@ -47,7 +47,10 @@ impl Type {
(a, Type::Optional(b)) => a.is_compatible(b),
(Type::Function(a_params, a_ret), Type::Function(b_params, b_ret)) => {
a_params.len() == b_params.len()
&& a_params.iter().zip(b_params.iter()).all(|(a, b)| a.is_compatible(b))
&& a_params
.iter()
.zip(b_params.iter())
.all(|(a, b)| a.is_compatible(b))
&& a_ret.is_compatible(b_ret)
}
(Type::Struct(a), Type::Struct(b)) => a.name == b.name,
@ -71,12 +74,20 @@ impl Type {
Type::List(inner) => format!("[{}]", inner.display()),
Type::Optional(inner) => format!("{}?", inner.display()),
Type::Function(params, ret) => {
let params_str = params.iter().map(|p| p.display()).collect::<Vec<_>>().join(", ");
let params_str = params
.iter()
.map(|p| p.display())
.collect::<Vec<_>>()
.join(", ");
format!("fn({}) -> {}", params_str, ret.display())
}
Type::Struct(s) => s.name.clone(),
Type::Enum(e) => e.name.clone(),
Type::Union(types) => types.iter().map(|t| t.display()).collect::<Vec<_>>().join(" | "),
Type::Union(types) => types
.iter()
.map(|t| t.display())
.collect::<Vec<_>>()
.join(" | "),
Type::Any => "any".to_string(),
Type::Unknown => "unknown".to_string(),
}