chore: initialize

This commit is contained in:
Ray Sinurat 2026-02-05 20:49:22 -06:00
commit c53b4db9cf
58 changed files with 16195 additions and 0 deletions

32
.gitignore vendored Normal file
View file

@ -0,0 +1,32 @@
# secrets (encrypted files are ok)
secrets/*.key
*.age.key
# AI
.claude
# Generated by Cargo
# will have compiled files and executables
debug
target
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
# Generated by cargo mutants
# Contains mutation testing data
**/mutants.out*/
# RustRover
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# OS files
.DS_Store
Thumbs.db

4073
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

36
Cargo.toml Normal file
View file

@ -0,0 +1,36 @@
# Workspace root: every crate lives under crates/.
[workspace]
resolver = "2"
members = ["crates/*"]

# Metadata inherited by member crates via `<field>.workspace = true`.
[workspace.package]
version = "0.1.0"
edition = "2024"
license = "MIT"
repository = "https://github.com/rayandrew/doot"

# Single source of truth for dependency versions across all member crates.
[workspace.dependencies]
# Internal crates
doot-lang = { path = "crates/doot-lang" }
doot-core = { path = "crates/doot-core" }
# Parsing / diagnostics (lexer, parser, error reports)
chumsky = "0.9"
ariadne = "0.4"
# CLI + serialization
clap = { version = "4", features = ["derive"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
toml = "0.8"
# Async runtime and async I/O
smol = "2"
async-fs = "2"
async-net = "2"
surf = "2"
# Parallelism
rayon = "1"
# Encryption (age), filesystem walking, user dirs
age = "0.10"
walkdir = "2"
dirs = "6"
# Diffing and hashing
similar = "2"
blake3 = "1"
# Platform detection
os_info = "3"
# Terminal UI (progress bars, TUI)
indicatif = "0.17"
ratatui = "0.29"
crossterm = "0.28"
# Error handling
thiserror = "2"
anyhow = "1"

View file

@ -0,0 +1,23 @@
# The `doot` command-line binary crate.
[package]
name = "doot-cli"
version.workspace = true
edition.workspace = true

# The installed binary is named `doot`, not `doot-cli`.
[[bin]]
name = "doot"
path = "src/main.rs"

# Versions are pinned in the workspace root Cargo.toml.
[dependencies]
doot-lang.workspace = true
doot-core.workspace = true
clap.workspace = true
serde.workspace = true
serde_json.workspace = true
smol.workspace = true
indicatif.workspace = true
ratatui.workspace = true
crossterm.workspace = true
thiserror.workspace = true
anyhow.workspace = true
dirs.workspace = true
blake3.workspace = true

View file

@ -0,0 +1,591 @@
use super::{find_config_file, parse_config, type_check};
use doot_core::state::{StateStore, SyncStatus};
use doot_core::{Config, Deployer};
use doot_lang::ast::HookStage;
use doot_lang::evaluator::{DotfileConfig, HookConfig};
use doot_lang::{validate_dotfile_targets, DotfileConflict, Evaluator};
use indicatif::{ProgressBar, ProgressStyle};
use std::io::{self, Write};
use std::path::PathBuf;
use std::process::Command;
/// Entry point for `doot apply`.
///
/// Pipeline: locate, parse and type-check the config; evaluate it; validate
/// dotfile targets; classify every dotfile by sync status (interactively
/// resolving conflicts); then deploy dotfiles, install packages, and run
/// lifecycle hooks around each phase.
///
/// * `config_path` — explicit config file; falls back to the search order in
///   `find_config_file` when `None`.
/// * `dry_run` — print what would happen without touching the filesystem.
/// * `parallel` — forwarded to the deployer's configuration.
/// * `verbose` — per-file status output.
pub fn run(
    config_path: Option<PathBuf>,
    dry_run: bool,
    parallel: bool,
    verbose: bool,
) -> anyhow::Result<()> {
    let path = find_config_file(config_path)?;
    let source = std::fs::read_to_string(&path)?;
    if verbose {
        println!("parsing {}", path.display());
    }
    let program = parse_config(&path)?;
    type_check(&program, &source, &path.display().to_string())?;
    let mut evaluator = Evaluator::new();
    let result = evaluator.eval(&program)?;
    // Get environment variables to expose to hook scripts
    let hook_env = evaluator.get_hook_env();
    let _total_items = result.dotfiles.len() + result.packages.len();
    println!(
        "config parsed: {} dotfiles, {} packages",
        result.dotfiles.len(),
        result.packages.len()
    );
    // Dotfile sources are resolved relative to the config file's directory.
    let source_dir = path.parent().unwrap_or(&PathBuf::from(".")).to_path_buf();
    // Validate dotfile targets and get proper execution order
    let validation = validate_dotfile_targets(&result.dotfiles, &source_dir);
    // Hard errors (duplicates / redundant overlaps) abort before any deploy.
    if !validation.errors.is_empty() {
        eprintln!("\nDotfile configuration errors:");
        for error in &validation.errors {
            match error {
                DotfileConflict::Duplicate { index_a, index_b } => {
                    let a = &result.dotfiles[*index_a];
                    let b = &result.dotfiles[*index_b];
                    eprintln!(
                        " [error] duplicate entry: '{}' -> '{}' appears twice (entries {} and {})",
                        a.source.display(),
                        a.target.display(),
                        index_a + 1,
                        index_b + 1
                    );
                    let _ = b; // silence unused warning
                }
                DotfileConflict::RedundantOverlap {
                    parent_index,
                    child_index,
                } => {
                    let parent = &result.dotfiles[*parent_index];
                    let child = &result.dotfiles[*child_index];
                    eprintln!(
                        " [error] redundant overlap: '{}' already includes '{}' (entries {} and {})",
                        parent.source.display(),
                        child.source.display(),
                        parent_index + 1,
                        child_index + 1
                    );
                    let _ = child; // silence unused warning
                }
            }
        }
        anyhow::bail!("fix configuration errors before deploying");
    }
    // Warnings are shown but do not block the deploy.
    if !validation.warnings.is_empty() {
        eprintln!("\nDotfile configuration warnings:");
        for warning in &validation.warnings {
            eprintln!(" [warn] {}", warning.message);
        }
        eprintln!();
    }
    // Reorder dotfiles based on dependency analysis
    let ordered_dotfiles: Vec<DotfileConfig> = validation
        .ordered_indices
        .iter()
        .map(|&i| result.dotfiles[i].clone())
        .collect();
    let config = Config::new(source_dir.clone())
        .dry_run(dry_run)
        .verbose(verbose)
        .parallel(parallel);
    let state_file = config.state_file.clone();
    let state = StateStore::new(&state_file);
    // Classify every dotfile before deploying: cleanly deployable entries go
    // to `to_deploy`, entries needing user input go to `conflicts`.
    let mut to_deploy: Vec<&DotfileConfig> = Vec::new();
    let mut conflicts: Vec<(&DotfileConfig, SyncStatus)> = Vec::new();
    // Track per-file conflicts for directories (file_path, source, target, status)
    let mut file_conflicts: Vec<(PathBuf, PathBuf, PathBuf, SyncStatus)> = Vec::new();
    for dotfile in &ordered_dotfiles {
        let full_source = source_dir.join(&dotfile.source);
        let status = state.check_sync_status(&full_source, &dotfile.target);
        // For directories, check individual files for smarter merging
        if full_source.is_dir() {
            let changed_files = state.get_changed_files_in_dir(&full_source, &dotfile.target);
            let mut has_real_conflicts = false;
            let mut has_changes = false;
            for (src, tgt, file_status) in changed_files {
                match file_status {
                    SyncStatus::Synced => {}
                    SyncStatus::NotDeployed | SyncStatus::TargetMissing | SyncStatus::SourceChanged => {
                        // Can auto-merge: just copy from source
                        has_changes = true;
                        if verbose {
                            println!(" [source changed] {}", src.display());
                        }
                    }
                    SyncStatus::TargetChanged => {
                        // Target changed but source didn't - keep target, will update state
                        has_changes = true;
                        if verbose {
                            println!(" [target changed, keeping] {}", tgt.display());
                        }
                    }
                    SyncStatus::Conflict => {
                        // Real conflict - both sides changed this file
                        has_real_conflicts = true;
                        file_conflicts.push((tgt.clone(), src, tgt, file_status));
                    }
                    SyncStatus::SourceMissing => {
                        has_changes = true;
                        if verbose {
                            println!(" [removed from source] {}", tgt.display());
                        }
                    }
                }
            }
            // One real per-file conflict marks the whole directory conflicted.
            if has_real_conflicts {
                conflicts.push((dotfile, SyncStatus::Conflict));
            } else if has_changes {
                to_deploy.push(dotfile);
            } else if verbose {
                println!(" [synced] {}", dotfile.target.display());
            }
        } else {
            // Single file handling (unchanged)
            match status {
                SyncStatus::Synced => {
                    if verbose {
                        println!(" [synced] {}", dotfile.target.display());
                    }
                }
                SyncStatus::NotDeployed | SyncStatus::TargetMissing => {
                    to_deploy.push(dotfile);
                }
                SyncStatus::SourceChanged => {
                    println!(
                        " [source changed] {} -> {}",
                        dotfile.source.display(),
                        dotfile.target.display()
                    );
                    to_deploy.push(dotfile);
                }
                SyncStatus::TargetChanged => {
                    conflicts.push((dotfile, status));
                }
                SyncStatus::Conflict => {
                    conflicts.push((dotfile, status));
                }
                SyncStatus::SourceMissing => {
                    eprintln!(
                        " [error] source missing: {}",
                        dotfile.source.display()
                    );
                }
            }
        }
    }
    // Interactive conflict resolution: list conflicts, then let the user pick
    // source-wins / target-wins / per-entry interactive / abort.
    if !conflicts.is_empty() {
        println!("\nConflicts detected:");
        for (dotfile, status) in &conflicts {
            let status_str = match status {
                SyncStatus::TargetChanged => "target changed",
                SyncStatus::Conflict => "both changed",
                _ => "conflict",
            };
            println!(
                " [{}] {} -> {}",
                status_str,
                dotfile.source.display(),
                dotfile.target.display()
            );
            // Show per-file conflicts for directories
            let full_source = source_dir.join(&dotfile.source);
            if full_source.is_dir() {
                for (file_path, _, _, _) in &file_conflicts {
                    if file_path.starts_with(&dotfile.target) {
                        let relative = file_path.strip_prefix(&dotfile.target).unwrap_or(file_path);
                        println!(" - {}", relative.display());
                    }
                }
            }
        }
        println!("\nHow to resolve conflicts?");
        println!(" [s] Use source (overwrite target)");
        println!(" [t] Keep target (skip these files)");
        println!(" [i] Interactive (ask for each)");
        println!(" [a] Abort");
        print!("\nChoice [s/t/i/a]: ");
        io::stdout().flush()?;
        let mut input = String::new();
        io::stdin().read_line(&mut input)?;
        match input.trim().to_lowercase().as_str() {
            "s" => {
                // Source wins everywhere: deploy every conflicted entry.
                for (dotfile, _) in conflicts {
                    to_deploy.push(dotfile);
                }
            }
            "t" => {
                println!("Skipping conflicted files.");
            }
            "i" => {
                // Per-entry prompt: use source, keep target, view diff first,
                // or merge interactively in an editor.
                for (dotfile, status) in conflicts {
                    let status_str = match status {
                        SyncStatus::TargetChanged => "target changed",
                        SyncStatus::Conflict => "both changed",
                        _ => "conflict",
                    };
                    println!(
                        "\n[{}] {} -> {}",
                        status_str,
                        dotfile.source.display(),
                        dotfile.target.display()
                    );
                    println!(" [s] Use source [t] Keep target [d] Show diff [m] Merge in editor");
                    print!(" Choice [s/t/d/m]: ");
                    io::stdout().flush()?;
                    let mut choice = String::new();
                    io::stdin().read_line(&mut choice)?;
                    match choice.trim().to_lowercase().as_str() {
                        "s" => {
                            to_deploy.push(dotfile);
                        }
                        "d" => {
                            let full_source = source_dir.join(&dotfile.source);
                            show_diff(&full_source, &dotfile.target);
                            print!(" Use source? [y/n]: ");
                            io::stdout().flush()?;
                            let mut confirm = String::new();
                            io::stdin().read_line(&mut confirm)?;
                            if confirm.trim().to_lowercase() == "y" {
                                to_deploy.push(dotfile);
                            }
                        }
                        "m" => {
                            let full_source = source_dir.join(&dotfile.source);
                            if merge_in_editor(&full_source, &dotfile.target)? {
                                // Source was updated with merged content, deploy it
                                to_deploy.push(dotfile);
                            } else {
                                println!(" Merge cancelled, keeping target.");
                            }
                        }
                        _ => {
                            println!(" Keeping target.");
                        }
                    }
                }
            }
            _ => {
                // Any other answer (including "a") aborts the whole apply.
                println!("Aborted.");
                return Ok(());
            }
        }
    }
    // Dry-run: show what would be done and exit
    if dry_run {
        if to_deploy.is_empty() {
            println!("\n[dry-run] all dotfiles synced, nothing to deploy");
        } else {
            println!("\n[dry-run] would deploy:");
            for dotfile in &to_deploy {
                println!(" {} -> {}", dotfile.source.display(), dotfile.target.display());
            }
        }
        if !result.packages.is_empty() {
            if let Some(manager) = doot_core::package::detect_package_manager() {
                // Preview package work: split into installed vs. missing.
                let mut to_install = Vec::new();
                let mut already_installed = Vec::new();
                for pkg in &result.packages {
                    if let Some(ref name) = pkg.default {
                        match manager.is_installed(name) {
                            Ok(true) => already_installed.push(name.clone()),
                            _ => to_install.push(name.clone()),
                        }
                    }
                }
                if !already_installed.is_empty() {
                    println!("\n[dry-run] packages already installed:");
                    for pkg in &already_installed {
                        println!(" {}", pkg);
                    }
                }
                if !to_install.is_empty() {
                    println!("\n[dry-run] would install packages:");
                    for pkg in &to_install {
                        println!(" {}", pkg);
                    }
                } else if already_installed.is_empty() {
                    println!("\n[dry-run] no packages to install");
                }
            } else {
                println!("\n[dry-run] no supported package manager found");
            }
        }
        return Ok(());
    }
    // Run before_deploy hooks
    run_hooks(&result.hooks, HookStage::BeforeDeploy, verbose, &hook_env)?;
    if to_deploy.is_empty() {
        println!("\nNothing to deploy (all files synced).");
    } else {
        let mut deployer = Deployer::new(config, result.sandbox);
        let pb = ProgressBar::new(to_deploy.len() as u64);
        pb.set_style(
            ProgressStyle::default_bar()
                .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
                .unwrap()
                .progress_chars("=>-"),
        );
        pb.set_message("deploying dotfiles");
        // Convert to owned for deploy
        let dotfiles_to_deploy: Vec<DotfileConfig> = to_deploy.into_iter().cloned().collect();
        let deploy_result = deployer.deploy(&dotfiles_to_deploy)?;
        pb.finish_with_message("done");
        println!("\ndeployment complete:");
        println!(" deployed: {}", deploy_result.deployed.len());
        println!(" skipped: {}", deploy_result.skipped.len());
        println!(" errors: {}", deploy_result.errors.len());
        for deployed in &deploy_result.deployed {
            if verbose {
                println!(
                    " [ok] {} -> {}",
                    deployed.source.display(),
                    deployed.target.display()
                );
            }
        }
        for skipped in &deploy_result.skipped {
            println!(
                " [skip] {} ({})",
                skipped.target.display(),
                skipped.reason
            );
        }
        for error in &deploy_result.errors {
            eprintln!(
                " [err] {} -> {}: {}",
                error.source.display(),
                error.target.display(),
                error.error
            );
        }
    }
    // Run after_deploy hooks
    run_hooks(&result.hooks, HookStage::AfterDeploy, verbose, &hook_env)?;
    if !result.packages.is_empty() {
        // Run before_package hooks
        run_hooks(&result.hooks, HookStage::BeforePackage, verbose, &hook_env)?;
        if let Some(manager) = doot_core::package::detect_package_manager() {
            // Filter out already installed packages
            let mut to_install = Vec::new();
            let mut already_installed = Vec::new();
            for pkg in &result.packages {
                if let Some(ref name) = pkg.default {
                    match manager.is_installed(name) {
                        Ok(true) => already_installed.push(name.clone()),
                        _ => to_install.push(name.clone()),
                    }
                }
            }
            if !already_installed.is_empty() && verbose {
                println!("\npackages already installed:");
                for pkg in &already_installed {
                    println!(" [ok] {}", pkg);
                }
            }
            if to_install.is_empty() {
                println!("\nall {} packages already installed", already_installed.len());
            } else {
                println!("\ninstalling {} packages...", to_install.len());
                manager.install(&to_install)?;
                println!("installed {} packages", to_install.len());
            }
            // Record all managed packages in state (both newly installed and already installed)
            let mut state = StateStore::new(&state_file);
            let manager_name = manager.name();
            for pkg in to_install.iter().chain(already_installed.iter()) {
                state.record_package(pkg, manager_name);
            }
            state.save()?;
        } else {
            println!("no supported package manager found");
        }
        // Run after_package hooks
        run_hooks(&result.hooks, HookStage::AfterPackage, verbose, &hook_env)?;
    }
    Ok(())
}
/// Best-effort unified diff between `target` (deployed copy) and `source`
/// (repo copy), via the system `diff` tool. Silently prints nothing when
/// `diff` cannot be spawned; directories and missing files are not diffed.
///
/// Takes `&Path` (instead of `&PathBuf`) so callers with either type work
/// through deref coercion; the redundant function-local
/// `use std::process::Command` that shadowed the module import is gone.
fn show_diff(source: &std::path::Path, target: &std::path::Path) {
    if source.is_file() && target.is_file() {
        // `diff -u old new`: target is the currently-deployed file,
        // source is the repository version.
        let output = Command::new("diff")
            .arg("--color=always")
            .arg("-u")
            .arg(target)
            .arg(source)
            .output();
        if let Ok(output) = output {
            println!("{}", String::from_utf8_lossy(&output.stdout));
        }
    } else {
        println!(" (diff not available for directories)");
    }
}
/// Execute every configured hook registered for `stage`, in declaration
/// order. Each hook runs through `sh -c` with `env_vars` added to its
/// environment; the first non-zero exit aborts with an error. A stage with
/// no hooks is a no-op (no output, even in verbose mode).
fn run_hooks(
    hooks: &[HookConfig],
    stage: HookStage,
    verbose: bool,
    env_vars: &std::collections::HashMap<String, String>,
) -> anyhow::Result<()> {
    let selected: Vec<&HookConfig> = hooks.iter().filter(|h| h.stage == stage).collect();
    if selected.is_empty() {
        return Ok(());
    }
    let label = match stage {
        HookStage::BeforeDeploy => "before_deploy",
        HookStage::AfterDeploy => "after_deploy",
        HookStage::BeforePackage => "before_package",
        HookStage::AfterPackage => "after_package",
    };
    if verbose {
        println!("\nrunning {} hooks...", label);
    }
    for hook in &selected {
        if verbose {
            println!(" $ {}", hook.run);
        }
        let exit = Command::new("sh")
            .arg("-c")
            .arg(&hook.run)
            .envs(env_vars)
            .status()?;
        if !exit.success() {
            anyhow::bail!("hook failed: {}", hook.run);
        }
    }
    Ok(())
}
/// Interactively merge `source` and `target` in the user's editor.
///
/// The target's content is copied to a temp file, which the user edits
/// (side-by-side vimdiff when the editor name contains "vim", a plain edit
/// otherwise). On confirmation the merged temp file is copied back over
/// `source`. Returns `Ok(true)` when the source was updated, `Ok(false)`
/// when the merge was cancelled. Directories are not supported.
fn merge_in_editor(source: &PathBuf, target: &PathBuf) -> anyhow::Result<bool> {
    use std::process::Command;
    if !source.is_file() || !target.is_file() {
        println!(" (merge not available for directories)");
        return Ok(false);
    }
    // Editor selection: $VISUAL wins over $EDITOR, defaulting to vim.
    let editor = std::env::var("VISUAL")
        .or_else(|_| std::env::var("EDITOR"))
        .unwrap_or_else(|_| "vim".to_string());
    // Create temp file for merged result
    let temp_dir = std::env::temp_dir();
    let merged_path = temp_dir.join(format!(
        "doot-merge-{}",
        source.file_name().unwrap_or_default().to_string_lossy()
    ));
    // Copy target to temp (start with target's content as base)
    std::fs::copy(target, &merged_path)?;
    // Try vimdiff-style merge if using vim/nvim
    if editor.contains("vim") {
        println!(" Opening vimdiff (left=target, right=source)...");
        println!(" Edit left pane, then :wqa to save and quit");
        let status = Command::new(&editor)
            .arg("-d")
            .arg(&merged_path) // target (editable)
            .arg(source) // source (reference)
            .status()?;
        if !status.success() {
            // Editor aborted: discard the scratch copy, keep the target.
            let _ = std::fs::remove_file(&merged_path);
            return Ok(false);
        }
    } else {
        // For other editors, open the merged temp file and tell the user
        // where the reference source lives.
        println!(" Opening {} with target content...", editor);
        println!(" Reference source: {}", source.display());
        let status = Command::new(&editor)
            .arg(&merged_path)
            .status()?;
        if !status.success() {
            let _ = std::fs::remove_file(&merged_path);
            return Ok(false);
        }
    }
    // Ask if user wants to use the merged result
    print!(" Save merged result to source? [y/n]: ");
    io::stdout().flush()?;
    let mut confirm = String::new();
    io::stdin().read_line(&mut confirm)?;
    if confirm.trim().to_lowercase() == "y" {
        // Copy merged result back to source
        std::fs::copy(&merged_path, source)?;
        let _ = std::fs::remove_file(&merged_path);
        println!(" Source updated with merged content.");
        Ok(true)
    } else {
        let _ = std::fs::remove_file(&merged_path);
        Ok(false)
    }
}

View file

@ -0,0 +1,22 @@
use super::{find_config_file, parse_config, type_check};
use std::path::PathBuf;
/// Entry point for `doot check`: parse and type-check the config without
/// deploying anything, reporting each validation stage as it passes.
pub fn run(config_path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
    let config_file = find_config_file(config_path)?;
    let source_text = std::fs::read_to_string(&config_file)?;
    if verbose {
        println!("checking {}", config_file.display());
    }
    // Syntax first, then types — each stage prints on success.
    let program = parse_config(&config_file)?;
    println!("syntax: ok");
    type_check(&program, &source_text, &config_file.display().to_string())?;
    println!("types: ok");
    println!("\nconfig is valid");
    println!(" statements: {}", program.statements.len());
    Ok(())
}

View file

@ -0,0 +1,35 @@
use doot_core::{encryption::AgeEncryption, Config};
use std::path::PathBuf;
pub fn run(file: PathBuf, identity: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
let config = Config::default();
let identity_key = if let Some(path) = identity {
std::fs::read_to_string(&path)?.trim().to_string()
} else if let Ok(key) = std::env::var("DOOT_AGE_IDENTITY") {
key
} else if config.identity_file.exists() {
std::fs::read_to_string(&config.identity_file)?.trim().to_string()
} else {
anyhow::bail!(
"no identity specified. use --identity, DOOT_AGE_IDENTITY env var, or {}",
config.identity_file.display()
);
};
if verbose {
println!("decrypting {}", file.display());
}
let encryption = AgeEncryption::new().with_identity(&identity_key)?;
let output = if file.extension().map(|e| e == "age").unwrap_or(false) {
file.with_extension("")
} else {
file.with_extension("decrypted")
};
encryption.decrypt_file(&file, &output)?;
println!("decrypted {} -> {}", file.display(), output.display());
Ok(())
}

View file

@ -0,0 +1,66 @@
use super::{find_config_file, parse_config, type_check};
use doot_core::deploy::DiffDisplay;
use doot_lang::Evaluator;
use std::path::PathBuf;
/// Entry point for `doot diff`: show pending differences between each
/// configured source and its deployed target. With `all`, unchanged entries
/// are listed too; `verbose` additionally reports sources missing on disk.
pub fn run(config_path: Option<PathBuf>, all: bool, verbose: bool) -> anyhow::Result<()> {
    let path = find_config_file(config_path)?;
    let source = std::fs::read_to_string(&path)?;
    let program = parse_config(&path)?;
    type_check(&program, &source, &path.display().to_string())?;
    let mut evaluator = Evaluator::new();
    let result = evaluator.eval(&program)?;
    // Sources are resolved relative to the config file's directory.
    let source_dir = path.parent().unwrap_or(&PathBuf::from(".")).to_path_buf();
    // Tracks whether anything was printed (set for `all` listings too).
    let mut has_changes = false;
    for dotfile in &result.dotfiles {
        let source_path = source_dir.join(&dotfile.source);
        let target_path = &dotfile.target;
        if !source_path.exists() {
            if verbose {
                println!("[missing] {} (source not found)", source_path.display());
            }
            continue;
        }
        let changed = DiffDisplay::has_changes(&source_path, target_path)?;
        if changed || all {
            has_changes = true;
            println!(
                "\n--- {} -> {}",
                dotfile.source.display(),
                target_path.display()
            );
            // Symlinked target: report whether the link points at our source.
            if target_path.is_symlink() {
                let link_target = std::fs::read_link(target_path)?;
                if link_target == source_path {
                    println!(" [symlink ok]");
                } else {
                    println!(" [symlink mismatch]");
                    println!(" current: {}", link_target.display());
                    println!(" expected: {}", source_path.display());
                }
            } else if target_path.exists() {
                // Regular file: show a unified diff when content differs.
                let diff = DiffDisplay::unified_diff(&source_path, target_path)?;
                if !diff.is_empty() {
                    println!("{}", diff);
                }
            } else {
                // Target does not exist yet; deployment would create it.
                println!(" [new file]");
            }
        }
    }
    if !has_changes {
        println!("no changes detected");
    }
    Ok(())
}

View file

@ -0,0 +1,226 @@
use super::{find_config_file, parse_config, type_check};
use doot_core::{
deploy::Linker,
state::{DeployMode, StateStore},
Config,
};
use doot_lang::Evaluator;
use std::io::{self, Write};
use std::path::PathBuf;
use std::process::Command;
/// Entry point for `doot edit <target>`: resolve a deployed target back to
/// its source file in the repo, open the source in the user's editor, and
/// optionally re-apply the change to the target afterwards.
///
/// * `auto_apply` — apply without asking; `skip_prompt` — never apply
///   (the two are mutually exclusive at the CLI layer — TODO confirm).
pub fn run(
    config_path: Option<PathBuf>,
    target: String,
    auto_apply: bool,
    skip_prompt: bool,
    verbose: bool,
) -> anyhow::Result<()> {
    let path = find_config_file(config_path)?;
    let source = std::fs::read_to_string(&path)?;
    let program = parse_config(&path)?;
    type_check(&program, &source, &path.display().to_string())?;
    let mut evaluator = Evaluator::new();
    let result = evaluator.eval(&program)?;
    let source_dir = path.parent().unwrap_or(&PathBuf::from(".")).to_path_buf();
    let config = Config::default();
    let state = StateStore::new(&config.state_file);
    let target_path = expand_tilde(&target);
    let (source_file, dotfile) =
        find_source_and_dotfile(&target_path, &result.dotfiles, &source_dir, &state)?;
    if verbose {
        println!("editing source: {}", source_file.display());
    }
    // Hash before editing so "no changes" can be detected by content,
    // not by mtime.
    let hash_before = hash_file(&source_file);
    // Open in $EDITOR (defaulting to vim).
    let editor = std::env::var("EDITOR").unwrap_or_else(|_| "vim".to_string());
    let status = Command::new(&editor).arg(&source_file).status()?;
    if !status.success() {
        anyhow::bail!("editor exited with non-zero status");
    }
    // Check if file changed
    let hash_after = hash_file(&source_file);
    if hash_before == hash_after {
        println!("no changes made");
        return Ok(());
    }
    // Determine if we should apply: auto_apply forces yes, skip_prompt
    // forces no, otherwise ask interactively.
    let should_apply = if auto_apply {
        true
    } else if skip_prompt {
        false
    } else {
        prompt_apply()?
    };
    if should_apply {
        if let Some(df) = dotfile {
            apply_single(&source_file, &df.target, &df, &config, verbose)?;
            println!("applied changes to {}", df.target.display());
        } else {
            // The mapping came from state only (no config entry), so a
            // targeted re-apply is not possible here.
            println!("hint: run 'doot apply' to deploy changes");
        }
    } else {
        println!("hint: run 'doot apply' to deploy changes");
    }
    Ok(())
}
/// Ask the user whether to apply the edit; defaults to "no".
/// Accepts "y"/"yes" in any letter case.
fn prompt_apply() -> anyhow::Result<bool> {
    print!("Apply changes? [y/N] ");
    io::stdout().flush()?;
    let mut answer = String::new();
    io::stdin().read_line(&mut answer)?;
    let trimmed = answer.trim();
    Ok(trimmed.eq_ignore_ascii_case("y") || trimmed.eq_ignore_ascii_case("yes"))
}
/// Blake3 content hash of a file as lowercase hex; unreadable or missing
/// files hash to the empty string so the caller sees "no change".
fn hash_file(path: &PathBuf) -> String {
    match std::fs::read(path) {
        Ok(bytes) => blake3::hash(&bytes).to_hex().to_string(),
        Err(_) => String::new(),
    }
}
/// Deploy a single `source` -> `target` pair according to the dotfile's
/// deploy mode (symlink or copy), then record the deployment in the state
/// store so later syncs can detect drift.
fn apply_single(
    source: &PathBuf,
    target: &PathBuf,
    dotfile: &doot_lang::evaluator::DotfileConfig,
    config: &Config,
    verbose: bool,
) -> anyhow::Result<()> {
    // Translate the language-level deploy mode into the core state enum.
    let deploy_mode = match dotfile.deploy {
        doot_lang::evaluator::DeployMode::Copy => DeployMode::Copy,
        doot_lang::evaluator::DeployMode::Link => DeployMode::Link,
    };
    let mut state = StateStore::new(&config.state_file);
    match deploy_mode {
        DeployMode::Link => {
            let linker = Linker::new(config.clone());
            linker.link(source, target)?;
            if verbose {
                println!("linked {} -> {}", source.display(), target.display());
            }
        }
        DeployMode::Copy => {
            // Ensure the destination's parent directory exists first.
            if let Some(parent) = target.parent() {
                std::fs::create_dir_all(parent)?;
            }
            if source.is_dir() {
                copy_dir_recursive(source, target)?;
            } else {
                std::fs::copy(source, target)?;
            }
            if verbose {
                println!("copied {} -> {}", source.display(), target.display());
            }
        }
    }
    // Persist the deployment record regardless of mode.
    state.record_deployment(source, target, deploy_mode);
    state.save()?;
    Ok(())
}
/// Recursively copy the directory tree at `src` into `dst`, creating `dst`
/// (and any missing parents) first. Non-directory entries are copied with
/// `fs::copy`; symlinked files are followed rather than re-created as links
/// — NOTE(review): confirm that is the intended behavior for linked dotfiles.
///
/// Takes `&Path` instead of `&PathBuf` (callers coerce automatically), which
/// also removes the previous pointless `src_path.into()` conversion in the
/// recursive call.
fn copy_dir_recursive(src: &std::path::Path, dst: &std::path::Path) -> std::io::Result<()> {
    std::fs::create_dir_all(dst)?;
    for entry in std::fs::read_dir(src)? {
        let entry = entry?;
        let src_path = entry.path();
        let dst_path = dst.join(entry.file_name());
        if entry.file_type()?.is_dir() {
            copy_dir_recursive(&src_path, &dst_path)?;
        } else {
            std::fs::copy(&src_path, &dst_path)?;
        }
    }
    Ok(())
}
fn expand_tilde(path: &str) -> PathBuf {
if path.starts_with("~/") {
if let Some(home) = dirs::home_dir() {
return home.join(&path[2..]);
}
}
PathBuf::from(path)
}
/// Map a deployed `target` path back to its source file in the repo.
///
/// Resolution order:
/// 1. exact match against a configured dotfile target,
/// 2. bare-name match against a dotfile's target or source file name,
/// 3. the recorded deployment in the state store,
/// 4. a path underneath a configured directory target (re-rooted under
///    that dotfile's source).
///
/// Returns the resolved source path plus the matching dotfile config —
/// `None` when the mapping came from state only. Errors with a listing of
/// all configured dotfiles when nothing matches.
fn find_source_and_dotfile<'a>(
    target: &PathBuf,
    dotfiles: &'a [doot_lang::evaluator::DotfileConfig],
    source_dir: &PathBuf,
    state: &StateStore,
) -> anyhow::Result<(PathBuf, Option<&'a doot_lang::evaluator::DotfileConfig>)> {
    // Exact match with dotfile targets
    for df in dotfiles {
        if &df.target == target {
            return Ok((source_dir.join(&df.source), Some(df)));
        }
    }
    // Match by bare name: only hits when the user typed just a file name,
    // since the full path string is compared against the name.
    let target_str = target.to_string_lossy();
    for df in dotfiles {
        let target_name = df
            .target
            .file_name()
            .map(|n| n.to_string_lossy().to_string())
            .unwrap_or_default();
        if target_name == target_str.as_ref() {
            return Ok((source_dir.join(&df.source), Some(df)));
        }
        let source_name = df
            .source
            .file_name()
            .map(|n| n.to_string_lossy().to_string())
            .unwrap_or_default();
        if source_name == target_str.as_ref() {
            return Ok((source_dir.join(&df.source), Some(df)));
        }
    }
    // State lookup: a past deployment may know the source even when the
    // config no longer lists it.
    if let Some(record) = state.get_deployment(target) {
        return Ok((record.source.clone(), None));
    }
    // Partial path matching: target may be a file inside a deployed directory.
    for df in dotfiles {
        if target.starts_with(&df.target) {
            let relative = target.strip_prefix(&df.target).unwrap_or(target);
            return Ok((source_dir.join(&df.source).join(relative), Some(df)));
        }
    }
    anyhow::bail!(
        "could not find source for '{}'\n\nAvailable dotfiles:\n{}",
        target.display(),
        dotfiles
            .iter()
            .map(|df| format!(" {} -> {}", df.source.display(), df.target.display()))
            .collect::<Vec<_>>()
            .join("\n")
    )
}

View file

@ -0,0 +1,39 @@
use doot_core::{encryption::AgeEncryption, Config};
use std::path::PathBuf;
pub fn run(file: PathBuf, recipient: Option<String>, verbose: bool) -> anyhow::Result<()> {
let config_dir = Config::default_config_dir();
let recipient_key = if let Some(r) = recipient {
r
} else if let Ok(key) = std::env::var("DOOT_AGE_RECIPIENT") {
key
} else {
let key_file = config_dir.join("recipient.txt");
if key_file.exists() {
std::fs::read_to_string(&key_file)?.trim().to_string()
} else {
anyhow::bail!(
"no recipient specified. use --recipient, DOOT_AGE_RECIPIENT env var, or {}",
key_file.display()
);
}
};
if verbose {
println!("encrypting {} with recipient {}", file.display(), &recipient_key[..20]);
}
let mut encryption = AgeEncryption::new();
encryption.add_recipient(&recipient_key)?;
let output = file.with_extension(
file.extension()
.map(|e| format!("{}.age", e.to_string_lossy()))
.unwrap_or_else(|| "age".to_string()),
);
encryption.encrypt_file(&file, &output)?;
println!("encrypted {} -> {}", file.display(), output.display());
Ok(())
}

View file

@ -0,0 +1,78 @@
use super::find_config_file;
use std::path::PathBuf;
/// Entry point for `doot fmt`: normalize the config file's formatting.
/// With `check`, nothing is written — a non-zero exit (code 1) signals
/// that the file would be reformatted.
pub fn run(config_path: Option<PathBuf>, check: bool, _verbose: bool) -> anyhow::Result<()> {
    let path = find_config_file(config_path)?;
    let original = std::fs::read_to_string(&path)?;
    let formatted = format_source(&original);
    let unchanged = formatted == original;
    if check {
        if unchanged {
            println!("{} is formatted correctly", path.display());
        } else {
            eprintln!("{} would be reformatted", path.display());
            std::process::exit(1);
        }
    } else if unchanged {
        println!("{} is already formatted", path.display());
    } else {
        std::fs::write(&path, &formatted)?;
        println!("formatted {}", path.display());
    }
    Ok(())
}
/// Re-indent doot source: lines are trimmed, a line ending in `:` opens a
/// block (one extra indent level), `else` closes one, runs of blank lines
/// collapse to a single blank line, and the output ends with exactly one
/// trailing newline.
///
/// Fix: the dedent check previously used `starts_with("else")`, which also
/// dedented identifiers like `elsewhere = 1`; it now matches only the
/// `else` keyword itself (`else`, `else:`, or `else ` followed by more).
fn format_source(source: &str) -> String {
    let mut result = String::new();
    let mut indent_level: usize = 0;
    let mut prev_was_blank = false;
    for line in source.lines() {
        let trimmed = line.trim();
        // Collapse consecutive blank lines into one.
        if trimmed.is_empty() {
            if !prev_was_blank {
                result.push('\n');
                prev_was_blank = true;
            }
            continue;
        }
        prev_was_blank = false;
        // Comments keep the current indent and never change nesting.
        if trimmed.starts_with('#') {
            result.push_str(&"    ".repeat(indent_level));
            result.push_str(trimmed);
            result.push('\n');
            continue;
        }
        // Dedent only for the `else` keyword, not identifiers that merely
        // begin with those four letters.
        let is_else = trimmed == "else"
            || trimmed.starts_with("else:")
            || trimmed.starts_with("else ");
        if is_else && indent_level > 0 {
            indent_level -= 1;
        }
        result.push_str(&"    ".repeat(indent_level));
        result.push_str(trimmed);
        result.push('\n');
        // A trailing ':' opens a new block (comments never reach here).
        if trimmed.ends_with(':') {
            indent_level += 1;
        }
    }
    // Trim trailing blank lines, keeping exactly one final newline.
    while result.ends_with("\n\n") {
        result.pop();
    }
    if !result.ends_with('\n') {
        result.push('\n');
    }
    result
}

View file

@ -0,0 +1,146 @@
use doot_core::Config;
use std::path::PathBuf;
/// Entry point for `doot init`: create the config/state directory layout,
/// write a starter `doot.doot` config (plus a `.gitignore` when a custom
/// source directory is used), and print next steps.
/// Existing files are never overwritten.
pub fn run(path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
    let source_dir = path.unwrap_or_else(Config::default_source_dir);
    let config = Config::new(source_dir.clone());
    // "default" = the user did not ask for a custom source directory.
    let is_default = source_dir == Config::default_config_dir();
    if verbose {
        println!("config dir: {}", config.config_dir.display());
        println!("state dir: {}", config.state_dir.display());
        if !is_default {
            println!("source dir: {}", source_dir.display());
        }
    }
    config.ensure_dirs()?;
    if !is_default {
        std::fs::create_dir_all(&source_dir)?;
    }
    let config_file = config.config_dir.join("doot.doot");
    if !config_file.exists() {
        // Custom source dirs get an explicit `source_dir` line prepended
        // to the example config.
        let content = if is_default {
            EXAMPLE_CONFIG.to_string()
        } else {
            example_config_with_source(&source_dir)
        };
        std::fs::write(&config_file, content)?;
        println!("created {}", config_file.display());
    }
    // Dotfiles themselves live under <source_dir>/config/.
    let dotfiles_config_dir = source_dir.join("config");
    std::fs::create_dir_all(&dotfiles_config_dir)?;
    if !is_default {
        let gitignore_path = source_dir.join(".gitignore");
        if !gitignore_path.exists() {
            std::fs::write(&gitignore_path, GITIGNORE_CONTENT)?;
            println!("created {}", gitignore_path.display());
        }
    }
    println!("doot initialized");
    println!();
    println!("structure:");
    println!(" {}/doot.doot", config.config_dir.display());
    println!(" {}/config/", config.config_dir.display());
    println!(" state: {}", config.state_dir.display());
    println!();
    println!("next steps:");
    println!(" 1. add dotfiles to {}/config/", config.config_dir.display());
    println!(" 2. edit {}/doot.doot", config.config_dir.display());
    println!(" 3. run 'doot apply -n' to preview");
    println!(" 4. run 'doot apply' to deploy");
    Ok(())
}
/// Render the starter config for a non-default source directory: the same
/// example body, prefixed with an explicit `source_dir` setting.
/// Takes `&Path` instead of `&PathBuf`; callers coerce automatically.
fn example_config_with_source(source_dir: &std::path::Path) -> String {
    format!(
        r#"# doot.doot
# source directory: {source_dir}
source_dir = "{source_dir}"
{EXAMPLE_CONFIG}"#,
        source_dir = source_dir.display(),
        EXAMPLE_CONFIG = EXAMPLE_CONFIG_BODY
    )
}

// Starter config written to the default config directory.
// NOTE(review): block bodies are indented four spaces to match the style
// `format_source` produces — confirm against the doot parser.
const EXAMPLE_CONFIG: &str = r#"# doot.doot
# Dotfiles
dotfile:
    source = "config/nvim"
    target = config_path("nvim")
dotfile:
    source = "config/kitty"
    target = config_path("kitty")
# Platform-specific
if os == Os::MacOS:
    dotfile:
        source = "config/aerospace"
        target = "~/.config/aerospace"
if os == Os::Linux:
    dotfile:
        source = "config/i3"
        target = "~/.config/i3"
# Packages
package: "ripgrep"
package: "fd"
package: "bat"
package: "fzf"
# Package with platform variants
package:
    default = "fd"
    apt = "fd-find"
"#;

// Same example without the "# doot.doot" header; used when the header and
// source_dir line are generated by `example_config_with_source`.
const EXAMPLE_CONFIG_BODY: &str = r#"# Dotfiles
dotfile:
    source = "config/nvim"
    target = config_path("nvim")
dotfile:
    source = "config/kitty"
    target = config_path("kitty")
# Platform-specific
if os == Os::MacOS:
    dotfile:
        source = "config/aerospace"
        target = "~/.config/aerospace"
if os == Os::Linux:
    dotfile:
        source = "config/i3"
        target = "~/.config/i3"
# Packages
package: "ripgrep"
package: "fd"
package: "bat"
package: "fzf"
# Package with platform variants
package:
    default = "fd"
    apt = "fd-find"
"#;

// .gitignore seeded into custom source directories (keeps plaintext
// secrets and OS junk out of the repo; encrypted .age files are fine).
const GITIGNORE_CONTENT: &str = r#"# secrets (encrypted files are ok)
secrets/*.key
*.age.key
# OS files
.DS_Store
Thumbs.db
"#;

View file

@ -0,0 +1,6 @@
/// Stub for the `doot lsp` subcommand: a real language server is not
/// implemented yet, so this just points users at `doot check`.
pub fn run() -> anyhow::Result<()> {
    println!(
        "doot language server\nLSP support is not yet implemented\n\nfor now, use 'doot check' for validation"
    );
    Ok(())
}

View file

@ -0,0 +1,95 @@
pub mod apply;
pub mod check;
pub mod decrypt;
pub mod diff;
pub mod edit;
pub mod encrypt;
pub mod fmt;
pub mod init;
pub mod lsp;
pub mod package;
pub mod rollback;
pub mod snapshot;
pub mod status;
pub mod tui;
use doot_core::Config;
use doot_lang::{Lexer, Parser, TypeChecker};
use std::path::PathBuf;
/// Resolve the config file path.
///
/// An explicitly supplied path must exist — there is no silent fallback.
/// Otherwise `./doot.doot` is preferred, then the per-user default
/// location; errors list every location searched.
pub fn find_config_file(base: Option<PathBuf>) -> anyhow::Result<PathBuf> {
    if let Some(explicit) = base {
        if !explicit.exists() {
            anyhow::bail!("config file not found: {}", explicit.display());
        }
        return Ok(explicit);
    }
    let candidates = [PathBuf::from("doot.doot"), Config::default_config_file()];
    if let Some(found) = candidates.into_iter().find(|c| c.exists()) {
        return Ok(found);
    }
    anyhow::bail!(
        "no config file found. searched:\n - ./doot.doot\n - {}",
        Config::default_config_file().display()
    )
}
/// Convert a byte offset into `source` to a 1-based line number.
///
/// Counts newline bytes before `offset`; offsets past the end are
/// clamped. Counting over `as_bytes()` is equivalent to counting chars
/// (b'\n' never appears as a UTF-8 continuation byte) and, unlike
/// `&str` slicing, cannot panic when `offset` falls in the middle of a
/// multi-byte character — error spans from the lexer/parser are raw
/// byte positions and are not guaranteed to be char boundaries.
fn byte_offset_to_line(source: &str, offset: usize) -> usize {
    let end = offset.min(source.len());
    source.as_bytes()[..end]
        .iter()
        .filter(|&&b| b == b'\n')
        .count()
        + 1
}
pub fn parse_config(path: &PathBuf) -> anyhow::Result<doot_lang::Program> {
let source = std::fs::read_to_string(path)?;
let tokens = Lexer::lex(&source).map_err(|errs| {
let msg = errs
.iter()
.map(|e| {
let line = byte_offset_to_line(&source, e.span().start);
format!("{}:{}: {}", path.display(), line, e)
})
.collect::<Vec<_>>()
.join("\n");
anyhow::anyhow!("lexer errors:\n{}", msg)
})?;
let program = Parser::parse(tokens).map_err(|errs| {
let msg = errs
.iter()
.map(|e| {
let line = byte_offset_to_line(&source, e.span().start);
format!("{}:{}: {}", path.display(), line, e)
})
.collect::<Vec<_>>()
.join("\n");
anyhow::anyhow!("parser errors:\n{}", msg)
})?;
Ok(program)
}
/// Run the type checker over `program` and report every diagnostic.
///
/// Each error is printed via its own `report` against `source` /
/// `filename`; if any were found, bail with the error count.
pub fn type_check(
    program: &doot_lang::Program,
    source: &str,
    filename: &str,
) -> anyhow::Result<()> {
    let mut checker = TypeChecker::new();
    let Err(errors) = checker.check(program) else {
        return Ok(());
    };
    for error in &errors {
        error.report(source, filename);
    }
    anyhow::bail!("{} type error(s) found", errors.len());
}

View file

@ -0,0 +1,104 @@
use super::{find_config_file, parse_config, type_check};
use doot_lang::Evaluator;
use std::path::PathBuf;
/// Install every configured package with the detected package manager,
/// preferring the manager-specific name variant over `default`.
pub fn install(config_path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
    let path = find_config_file(config_path)?;
    let source = std::fs::read_to_string(&path)?;
    let program = parse_config(&path)?;
    type_check(&program, &source, &path.display().to_string())?;
    let mut evaluator = Evaluator::new();
    let result = evaluator.eval(&program)?;
    if result.packages.is_empty() {
        println!("no packages configured");
        return Ok(());
    }
    let manager = doot_core::package::detect_package_manager()
        .ok_or_else(|| anyhow::anyhow!("no supported package manager found"))?;
    if verbose {
        println!("using package manager: {}", manager.name());
    }
    // Resolve each package to the name this manager knows it by,
    // falling back to `default` when no variant is declared.
    let package_names: Vec<String> = result
        .packages
        .iter()
        .filter_map(|p| {
            let variant = match manager.name() {
                "brew" => p.brew.clone(),
                "apt" => p.apt.clone(),
                "pacman" => p.pacman.clone(),
                "yay" => p.yay.clone(),
                _ => None,
            };
            variant.or_else(|| p.default.clone())
        })
        .collect();
    if package_names.is_empty() {
        println!("no packages to install for {}", manager.name());
        return Ok(());
    }
    println!("installing {} packages...", package_names.len());
    if verbose {
        for name in &package_names {
            println!(" {}", name);
        }
    }
    manager.install(&package_names)?;
    println!("done");
    Ok(())
}
pub fn update(verbose: bool) -> anyhow::Result<()> {
let manager = doot_core::package::detect_package_manager()
.ok_or_else(|| anyhow::anyhow!("no supported package manager found"))?;
if verbose {
println!("updating package index with {}", manager.name());
}
manager.update()?;
println!("package index updated");
Ok(())
}
/// List the configured packages and whether each one is installed.
///
/// NOTE(review): only `default` names are shown and probed; per-manager
/// variant names (apt/brew/...) are not consulted — confirm intended.
pub fn list(config_path: Option<PathBuf>, _verbose: bool) -> anyhow::Result<()> {
    let path = find_config_file(config_path)?;
    let source = std::fs::read_to_string(&path)?;
    let program = parse_config(&path)?;
    type_check(&program, &source, &path.display().to_string())?;
    let mut evaluator = Evaluator::new();
    let result = evaluator.eval(&program)?;
    if result.packages.is_empty() {
        println!("no packages configured");
        return Ok(());
    }
    let manager = doot_core::package::detect_package_manager();
    println!("configured packages:");
    for name in result.packages.iter().filter_map(|p| p.default.as_ref()) {
        // Without a detected manager, everything reads as not installed.
        let installed = match manager.as_ref() {
            Some(m) => m.is_installed(name).unwrap_or(false),
            None => false,
        };
        let marker = if installed { "" } else { "" };
        println!(" {} {}", marker, name);
    }
    Ok(())
}

View file

@ -0,0 +1,85 @@
use doot_core::{
state::{DeployMode, Snapshot},
Config,
};
use std::path::PathBuf;
/// Restore deployments from a saved snapshot.
///
/// `snapshot_name` may be a concrete name, or "last"/"latest" to pick
/// the most recent snapshot. With no name, the available snapshots are
/// listed and the function bails so the user can choose one.
pub fn run(
    _config_path: Option<PathBuf>,
    snapshot_name: Option<String>,
    verbose: bool,
) -> anyhow::Result<()> {
    let config = Config::default();
    let name = if let Some(n) = snapshot_name {
        if n == "last" || n == "latest" {
            // "last"/"latest" resolve to the newest entry in the list.
            let snapshots = Snapshot::list(&config.snapshot_dir)?;
            snapshots
                .last()
                .cloned()
                .ok_or_else(|| anyhow::anyhow!("no snapshots found"))?
        } else {
            n
        }
    } else {
        // No name given: show what exists, then ask the user to re-run.
        let snapshots = Snapshot::list(&config.snapshot_dir)?;
        if snapshots.is_empty() {
            anyhow::bail!("no snapshots found in {}", config.snapshot_dir.display());
        }
        println!("available snapshots:");
        for (i, name) in snapshots.iter().enumerate() {
            println!(" {}. {}", i + 1, name);
        }
        anyhow::bail!("please specify a snapshot name or 'last'");
    };
    if verbose {
        println!("rolling back to snapshot: {}", name);
        println!(" snapshot dir: {}", config.snapshot_dir.display());
    }
    let snapshot = Snapshot::load(&name, &config.snapshot_dir)?;
    for (target_str, record) in &snapshot.state.deployments {
        let target = PathBuf::from(target_str);
        // Drop any existing symlink at the target before restoring.
        if target.is_symlink() {
            if verbose {
                println!("removing symlink: {}", target.display());
            }
            std::fs::remove_file(&target)?;
        }
        match record.mode {
            DeployMode::Link => {
                if verbose {
                    println!(
                        "recreating symlink: {} -> {}",
                        record.source.display(),
                        target.display()
                    );
                }
                // NOTE(review): on non-unix builds this branch removes the
                // old symlink but creates nothing in its place — confirm.
                #[cfg(unix)]
                std::os::unix::fs::symlink(&record.source, &target)?;
            }
            DeployMode::Copy => {
                if verbose {
                    println!(
                        "restoring copy: {} -> {}",
                        record.source.display(),
                        target.display()
                    );
                }
                // Best-effort: silently skip if the source has vanished.
                if record.source.exists() {
                    std::fs::copy(&record.source, &target)?;
                }
            }
        }
    }
    // Persist the snapshot's state as the new current state.
    let json = serde_json::to_string_pretty(&snapshot.state)?;
    std::fs::write(&config.state_file, json)?;
    println!("rolled back to snapshot: {}", name);
    Ok(())
}

View file

@ -0,0 +1,33 @@
use doot_core::{
state::{Snapshot, StateStore},
Config,
};
use std::path::PathBuf;
/// Create a named snapshot of the current deployment state and record
/// the snapshot's name in the state store.
pub fn run(_config_path: Option<PathBuf>, name: String, verbose: bool) -> anyhow::Result<()> {
    let config = Config::default();
    config.ensure_dirs()?;
    let mut store = StateStore::new(&config.state_file);
    if verbose {
        println!("creating snapshot: {}", name);
        println!(" state file: {}", config.state_file.display());
        println!(" snapshot dir: {}", config.snapshot_dir.display());
    }
    // A missing or unreadable state file is treated as an empty state.
    let raw_state = match std::fs::read_to_string(&config.state_file) {
        Ok(content) => content,
        Err(_) => String::from("{}"),
    };
    let snapshot_state: doot_core::state::store::State = serde_json::from_str(&raw_state)?;
    Snapshot::create(&name, &snapshot_state, &config.snapshot_dir)?;
    // Record snapshot in state
    store.add_snapshot(&name);
    store.save()?;
    println!("snapshot created: {}", name);
    println!(" deployments: {}", snapshot_state.deployments.len());
    println!(" packages: {}", snapshot_state.packages.len());
    Ok(())
}

View file

@ -0,0 +1,96 @@
use super::{find_config_file, parse_config, type_check};
use doot_core::state::StateStore;
use doot_lang::Evaluator;
use std::path::PathBuf;
/// Print a status report: each dotfile's deployment state, package
/// installation state, and whether secret targets exist.
pub fn run(config_path: Option<PathBuf>, verbose: bool) -> anyhow::Result<()> {
    let path = find_config_file(config_path)?;
    let source = std::fs::read_to_string(&path)?;
    let program = parse_config(&path)?;
    type_check(&program, &source, &path.display().to_string())?;
    let mut evaluator = Evaluator::new();
    let result = evaluator.eval(&program)?;
    // Sources are resolved relative to the config file's directory.
    let source_dir = path.parent().unwrap_or(&PathBuf::from(".")).to_path_buf();
    let state_file = source_dir.join(".doot-state.json");
    let state = StateStore::new(&state_file);
    println!("doot status");
    println!("===========\n");
    println!("dotfiles ({}):", result.dotfiles.len());
    for dotfile in &result.dotfiles {
        let target = &dotfile.target;
        // Classify: a symlink pointing at our source is "ok", a symlink
        // elsewhere is "mismatch"; a plain file is "modified"/"deployed"
        // per the state store; a missing target is "pending".
        let status = if target.is_symlink() {
            let source_path = source_dir.join(&dotfile.source);
            let link_target = std::fs::read_link(target).ok();
            if link_target.as_ref() == Some(&source_path) {
                "ok"
            } else {
                "mismatch"
            }
        } else if target.exists() {
            if state.has_changed(&source_dir.join(&dotfile.source), target) {
                "modified"
            } else {
                "deployed"
            }
        } else {
            "pending"
        };
        // ANSI-colored glyph per status (green check, yellow dot/tilde,
        // red cross).
        let marker = match status {
            "ok" => "\x1b[32m✓\x1b[0m",
            "deployed" => "\x1b[32m✓\x1b[0m",
            "pending" => "\x1b[33m○\x1b[0m",
            "modified" => "\x1b[33m~\x1b[0m",
            "mismatch" => "\x1b[31m✗\x1b[0m",
            _ => "?",
        };
        println!(
            " {} {} -> {}",
            marker,
            dotfile.source.display(),
            target.display()
        );
        if verbose && status != "ok" && status != "deployed" {
            println!(" status: {}", status);
        }
    }
    if !result.packages.is_empty() {
        println!("\npackages ({}):", result.packages.len());
        // NOTE(review): only `default` package names are probed here;
        // per-manager variants are not consulted — confirm intended.
        if let Some(manager) = doot_core::package::detect_package_manager() {
            for pkg in &result.packages {
                if let Some(ref name) = pkg.default {
                    let installed = manager.is_installed(name).unwrap_or(false);
                    let marker = if installed {
                        "\x1b[32m✓\x1b[0m"
                    } else {
                        "\x1b[33m○\x1b[0m"
                    };
                    println!(" {} {}", marker, name);
                }
            }
        }
    }
    if !result.secrets.is_empty() {
        println!("\nsecrets ({}):", result.secrets.len());
        for secret in &result.secrets {
            // Secrets are only checked for target existence, not content.
            let exists = secret.target.exists();
            let marker = if exists {
                "\x1b[32m✓\x1b[0m"
            } else {
                "\x1b[33m○\x1b[0m"
            };
            println!(" {} {}", marker, secret.target.display());
        }
    }
    Ok(())
}

View file

@ -0,0 +1,846 @@
use super::{find_config_file, parse_config, type_check};
use crossterm::{
event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyEventKind},
execute,
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
};
use doot_core::config::Config;
use doot_core::deploy::Linker;
use doot_core::state::{DeployMode, StateStore};
use doot_lang::Evaluator;
use ratatui::{
backend::CrosstermBackend,
layout::{Constraint, Direction, Layout},
style::{Color, Modifier, Style},
text::{Line, Span},
widgets::{Block, Borders, List, ListItem, ListState, Paragraph, Tabs, Gauge},
Frame, Terminal,
};
use std::io;
use std::path::PathBuf;
/// Launch the interactive TUI.
///
/// Puts the terminal into raw mode + alternate screen with mouse
/// capture, runs the event loop, then restores the terminal before
/// propagating any error the app returned.
pub fn run(config_path: Option<PathBuf>) -> anyhow::Result<()> {
    enable_raw_mode()?;
    let mut stdout = io::stdout();
    execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;
    let backend = CrosstermBackend::new(stdout);
    let mut terminal = Terminal::new(backend)?;
    // Capture the result so terminal cleanup always runs first.
    let result = run_app(&mut terminal, config_path);
    disable_raw_mode()?;
    execute!(
        terminal.backend_mut(),
        LeaveAlternateScreen,
        DisableMouseCapture
    )?;
    terminal.show_cursor()?;
    result
}
/// Which top-level tab of the TUI is active.
#[derive(Clone, Copy, PartialEq)]
enum Tab {
    Dotfiles,
    Packages,
    Secrets,
    Status,
}
/// Lifecycle of an "apply" operation triggered from the TUI.
#[derive(Clone, Copy, PartialEq)]
enum ApplyState {
    Idle,      // nothing in progress
    Applying,  // work running; navigation input is ignored
    Done,      // finished; results shown until dismissed
    NeedsSudo, // waiting for the user to confirm a password prompt
}
/// Keyboard routing: normal navigation vs. sudo password entry.
#[derive(Clone, PartialEq)]
enum InputMode {
    Normal,
    Password,
}
/// All mutable state behind the TUI.
struct App {
    // Currently active tab.
    tab: Tab,
    // Items evaluated from the config, with per-item selection state.
    dotfiles: Vec<DotfileItem>,
    packages: Vec<PackageItem>,
    // ratatui list cursors for the two selectable tabs.
    dotfile_state: ListState,
    package_state: ListState,
    // Directory containing the config file; sources are relative to it.
    source_dir: PathBuf,
    // Apply lifecycle plus progress counters and log lines for display.
    apply_state: ApplyState,
    apply_progress: usize,
    apply_total: usize,
    apply_logs: Vec<(String, LogLevel)>,
    // First visible log line when scrolling the Done view.
    log_scroll: usize,
    // Keyboard routing and transient sudo-password entry.
    input_mode: InputMode,
    password_input: String,
    sudo_password: Option<String>,
}
/// Severity used to color apply-log lines.
#[derive(Clone, Copy)]
enum LogLevel {
    Info,
    Success,
    Error,
}
/// One dotfile row in the TUI.
struct DotfileItem {
    // Source path relative to the config directory.
    source: PathBuf,
    // Absolute deployment target.
    target: PathBuf,
    status: FileStatus,
    // Whether the row is checked for the next apply.
    selected: bool,
    deploy_mode: DeployMode,
}
/// One package row in the TUI.
struct PackageItem {
    name: String,
    installed: bool,
    // Whether the row is checked for the next apply.
    selected: bool,
}
/// Display status of a dotfile target.
#[derive(Clone, Copy, PartialEq)]
enum FileStatus {
    Synced,   // deployed and up to date
    Modified, // source or target changed since deployment
    Pending,  // not deployed yet
    Error,    // source missing, or a deployment attempt failed
}
impl App {
    /// Build the TUI model: parse + type-check the config, evaluate it,
    /// and pre-compute the sync status of every dotfile and package.
    fn new(config_path: Option<PathBuf>) -> anyhow::Result<Self> {
        let path = find_config_file(config_path)?;
        let source = std::fs::read_to_string(&path)?;
        let program = parse_config(&path)?;
        type_check(&program, &source, &path.display().to_string())?;
        let mut evaluator = Evaluator::new();
        let result = evaluator.eval(&program)?;
        let source_dir = path.parent().unwrap_or(&PathBuf::from(".")).to_path_buf();
        let config = Config::default();
        let state = StateStore::new(&config.state_file);
        let dotfiles: Vec<DotfileItem> = result
            .dotfiles
            .iter()
            .map(|d| {
                let full_source = source_dir.join(&d.source);
                // Translate the language-level deploy mode into the core one.
                let deploy_mode = match d.deploy {
                    doot_lang::evaluator::DeployMode::Copy => DeployMode::Copy,
                    doot_lang::evaluator::DeployMode::Link => DeployMode::Link,
                };
                let status = if !full_source.exists() {
                    FileStatus::Error
                } else {
                    // Collapse the fine-grained sync states into the four
                    // statuses the UI distinguishes.
                    match state.check_sync_status(&full_source, &d.target) {
                        doot_core::state::SyncStatus::Synced => FileStatus::Synced,
                        doot_core::state::SyncStatus::SourceChanged => FileStatus::Modified,
                        doot_core::state::SyncStatus::TargetChanged => FileStatus::Modified,
                        doot_core::state::SyncStatus::Conflict => FileStatus::Modified,
                        doot_core::state::SyncStatus::NotDeployed => FileStatus::Pending,
                        doot_core::state::SyncStatus::TargetMissing => FileStatus::Pending,
                        doot_core::state::SyncStatus::SourceMissing => FileStatus::Error,
                    }
                };
                DotfileItem {
                    source: d.source.clone(),
                    target: d.target.clone(),
                    status,
                    // Pre-select anything that still needs work.
                    selected: !matches!(status, FileStatus::Error | FileStatus::Synced),
                    deploy_mode,
                }
            })
            .collect();
        let manager = doot_core::package::detect_package_manager();
        let packages: Vec<PackageItem> = result
            .packages
            .iter()
            .filter_map(|p| p.default.clone())
            .map(|name| {
                let installed = manager
                    .as_ref()
                    .map(|m| m.is_installed(&name).unwrap_or(false))
                    .unwrap_or(false);
                PackageItem {
                    name,
                    installed,
                    // Pre-select packages that are not installed yet.
                    selected: !installed,
                }
            })
            .collect();
        let mut dotfile_state = ListState::default();
        if !dotfiles.is_empty() {
            dotfile_state.select(Some(0));
        }
        let mut package_state = ListState::default();
        if !packages.is_empty() {
            package_state.select(Some(0));
        }
        Ok(Self {
            tab: Tab::Dotfiles,
            dotfiles,
            packages,
            dotfile_state,
            package_state,
            source_dir,
            apply_state: ApplyState::Idle,
            apply_progress: 0,
            apply_total: 0,
            apply_logs: Vec::new(),
            log_scroll: 0,
            input_mode: InputMode::Normal,
            password_input: String::new(),
            sudo_password: None,
        })
    }
    /// Cycle forward through tabs (no-op while an apply is running).
    fn next_tab(&mut self) {
        if self.apply_state == ApplyState::Applying {
            return;
        }
        self.tab = match self.tab {
            Tab::Dotfiles => Tab::Packages,
            Tab::Packages => Tab::Secrets,
            Tab::Secrets => Tab::Status,
            Tab::Status => Tab::Dotfiles,
        };
    }
    /// Cycle backward through tabs (no-op while an apply is running).
    fn prev_tab(&mut self) {
        if self.apply_state == ApplyState::Applying {
            return;
        }
        self.tab = match self.tab {
            Tab::Dotfiles => Tab::Status,
            Tab::Packages => Tab::Dotfiles,
            Tab::Secrets => Tab::Packages,
            Tab::Status => Tab::Secrets,
        };
    }
    /// Move the list cursor down one row, wrapping at the bottom.
    fn next_item(&mut self) {
        if self.apply_state == ApplyState::Applying {
            return;
        }
        match self.tab {
            Tab::Dotfiles => {
                let len = self.dotfiles.len();
                if len > 0 {
                    let i = self.dotfile_state.selected().map(|i| (i + 1) % len).unwrap_or(0);
                    self.dotfile_state.select(Some(i));
                }
            }
            Tab::Packages => {
                let len = self.packages.len();
                if len > 0 {
                    let i = self.package_state.selected().map(|i| (i + 1) % len).unwrap_or(0);
                    self.package_state.select(Some(i));
                }
            }
            _ => {}
        }
    }
    /// Move the list cursor up one row, wrapping at the top.
    fn prev_item(&mut self) {
        if self.apply_state == ApplyState::Applying {
            return;
        }
        match self.tab {
            Tab::Dotfiles => {
                let len = self.dotfiles.len();
                if len > 0 {
                    let i = self.dotfile_state.selected().map(|i| if i == 0 { len - 1 } else { i - 1 }).unwrap_or(0);
                    self.dotfile_state.select(Some(i));
                }
            }
            Tab::Packages => {
                let len = self.packages.len();
                if len > 0 {
                    let i = self.package_state.selected().map(|i| if i == 0 { len - 1 } else { i - 1 }).unwrap_or(0);
                    self.package_state.select(Some(i));
                }
            }
            _ => {}
        }
    }
    /// Toggle the checkbox of the highlighted row. Dotfiles in the
    /// Error state cannot be selected.
    fn toggle_selected(&mut self) {
        if self.apply_state == ApplyState::Applying {
            return;
        }
        match self.tab {
            Tab::Dotfiles => {
                if let Some(i) = self.dotfile_state.selected() {
                    if let Some(item) = self.dotfiles.get_mut(i) {
                        if item.status != FileStatus::Error {
                            item.selected = !item.selected;
                        }
                    }
                }
            }
            Tab::Packages => {
                if let Some(i) = self.package_state.selected() {
                    if let Some(item) = self.packages.get_mut(i) {
                        item.selected = !item.selected;
                    }
                }
            }
            _ => {}
        }
    }
    /// Select every actionable row on the current tab (skips synced or
    /// errored dotfiles and already-installed packages).
    fn select_all(&mut self) {
        if self.apply_state == ApplyState::Applying {
            return;
        }
        match self.tab {
            Tab::Dotfiles => {
                for item in &mut self.dotfiles {
                    if item.status != FileStatus::Error && item.status != FileStatus::Synced {
                        item.selected = true;
                    }
                }
            }
            Tab::Packages => {
                for item in &mut self.packages {
                    if !item.installed {
                        item.selected = true;
                    }
                }
            }
            _ => {}
        }
    }
    /// Clear every checkbox on the current tab.
    fn select_none(&mut self) {
        if self.apply_state == ApplyState::Applying {
            return;
        }
        match self.tab {
            Tab::Dotfiles => {
                for item in &mut self.dotfiles {
                    item.selected = false;
                }
            }
            Tab::Packages => {
                for item in &mut self.packages {
                    item.selected = false;
                }
            }
            _ => {}
        }
    }
    /// Start an apply run over all selected rows. If package installs
    /// will need sudo and no password is cached, switch to the sudo
    /// confirmation state instead of running immediately.
    fn apply(&mut self) {
        if self.apply_state == ApplyState::Applying {
            return;
        }
        self.apply_logs.clear();
        self.log_scroll = 0;
        let selected_dotfiles: Vec<_> = self.dotfiles.iter()
            .enumerate()
            .filter(|(_, d)| d.selected && d.status != FileStatus::Error)
            .map(|(i, _)| i)
            .collect();
        let selected_packages: Vec<_> = self.packages.iter()
            .enumerate()
            .filter(|(_, p)| p.selected && !p.installed)
            .map(|(i, _)| i)
            .collect();
        if selected_dotfiles.is_empty() && selected_packages.is_empty() {
            self.apply_logs.push(("Nothing to apply".to_string(), LogLevel::Info));
            self.apply_state = ApplyState::Done;
            return;
        }
        // Check if we need sudo for packages
        if !selected_packages.is_empty() && self.needs_sudo() && self.sudo_password.is_none() {
            self.apply_state = ApplyState::NeedsSudo;
            return;
        }
        self.apply_state = ApplyState::Applying;
        self.apply_with_sudo();
    }
    /// Leave the Done screen and forget any cached sudo password.
    fn dismiss_apply(&mut self) {
        if self.apply_state == ApplyState::Done {
            self.apply_state = ApplyState::Idle;
            self.sudo_password = None;
        }
    }
    /// Whether the pending apply would require sudo.
    ///
    /// NOTE(review): when no packages are selected this falls back to
    /// "any dotfile is selected" — confirm dotfile deploys need sudo.
    fn needs_sudo(&self) -> bool {
        let has_packages = self.packages.iter().any(|p| p.selected && !p.installed);
        let has_owner = self.dotfiles.iter().any(|d| d.selected);
        if has_packages {
            if let Some(manager) = doot_core::package::detect_package_manager() {
                return manager.needs_sudo();
            }
        }
        has_owner
    }
    /// Perform the apply synchronously: deploy selected dotfiles
    /// (link or copy), then install selected packages, logging each
    /// step and updating per-row state as it goes.
    fn apply_with_sudo(&mut self) {
        let selected_dotfiles: Vec<_> = self.dotfiles.iter()
            .enumerate()
            .filter(|(_, d)| d.selected && d.status != FileStatus::Error)
            .map(|(i, _)| i)
            .collect();
        let selected_packages: Vec<_> = self.packages.iter()
            .enumerate()
            .filter(|(_, p)| p.selected && !p.installed)
            .map(|(i, _)| i)
            .collect();
        self.apply_total = selected_dotfiles.len() + selected_packages.len();
        self.apply_progress = 0;
        // Apply dotfiles
        let config = Config::default();
        let linker = Linker::new(config.clone());
        let mut state = StateStore::new(&config.state_file);
        for idx in selected_dotfiles {
            let dotfile = &self.dotfiles[idx];
            let full_source = self.source_dir.join(&dotfile.source);
            let target = &dotfile.target;
            let action_name = match dotfile.deploy_mode {
                DeployMode::Copy => "Copying",
                DeployMode::Link => "Linking",
            };
            self.apply_logs.push((
                format!("{} {} -> {}", action_name, dotfile.source.display(), target.display()),
                LogLevel::Info,
            ));
            let result: Result<(), String> = match dotfile.deploy_mode {
                DeployMode::Link => linker.link(&full_source, target).map(|_| ()).map_err(|e| e.to_string()),
                DeployMode::Copy => copy_file(&full_source, target),
            };
            match result {
                Ok(_) => {
                    // Record success so future status checks see "synced".
                    state.record_deployment(&full_source, target, dotfile.deploy_mode);
                    let done_msg = match dotfile.deploy_mode {
                        DeployMode::Copy => format!(" ✓ Copied {}", dotfile.source.display()),
                        DeployMode::Link => format!(" ✓ Linked {}", dotfile.source.display()),
                    };
                    self.apply_logs.push((done_msg, LogLevel::Success));
                    self.dotfiles[idx].status = FileStatus::Synced;
                    self.dotfiles[idx].selected = false;
                }
                Err(e) => {
                    self.apply_logs.push((
                        format!(" ✗ Failed: {}", e),
                        LogLevel::Error,
                    ));
                    self.dotfiles[idx].status = FileStatus::Error;
                }
            }
            self.apply_progress += 1;
        }
        // Best-effort save; failures are not surfaced in the log.
        let _ = state.save();
        // Install packages with sudo if needed
        if let Some(manager) = doot_core::package::detect_package_manager() {
            for idx in selected_packages {
                let package = &self.packages[idx];
                self.apply_logs.push((
                    format!("Installing {} via {}", package.name, manager.name()),
                    LogLevel::Info,
                ));
                let result = if manager.needs_sudo() {
                    if let Some(ref password) = self.sudo_password {
                        manager.install_with_sudo(&[package.name.clone()], password)
                    } else {
                        manager.install(&[package.name.clone()])
                    }
                } else {
                    manager.install(&[package.name.clone()])
                };
                match result {
                    Ok(_) => {
                        self.apply_logs.push((
                            format!(" ✓ Installed {}", package.name),
                            LogLevel::Success,
                        ));
                        self.packages[idx].installed = true;
                        self.packages[idx].selected = false;
                    }
                    Err(e) => {
                        self.apply_logs.push((
                            format!(" ✗ Failed: {}", e),
                            LogLevel::Error,
                        ));
                    }
                }
                self.apply_progress += 1;
            }
        } else {
            self.apply_logs.push((
                "No package manager available".to_string(),
                LogLevel::Error,
            ));
        }
        self.apply_state = ApplyState::Done;
    }
    /// Scroll the apply log up one line (clamped at the top).
    fn scroll_log_up(&mut self) {
        if self.log_scroll > 0 {
            self.log_scroll -= 1;
        }
    }
    /// Scroll the apply log down one line (clamped at the last line).
    fn scroll_log_down(&mut self) {
        if self.log_scroll < self.apply_logs.len().saturating_sub(1) {
            self.log_scroll += 1;
        }
    }
}
/// Main event loop: draw the UI, then route key events according to
/// the current input mode and apply state. Returns when the user quits.
fn run_app(
    terminal: &mut Terminal<CrosstermBackend<io::Stdout>>,
    config_path: Option<PathBuf>,
) -> anyhow::Result<()> {
    let mut app = App::new(config_path)?;
    loop {
        terminal.draw(|f| ui(f, &mut app))?;
        if let Event::Key(key) = event::read()? {
            // Only react to presses (filters repeat/release events).
            if key.kind == KeyEventKind::Press {
                match app.input_mode {
                    // Password entry: collect characters until Enter/Esc.
                    InputMode::Password => match key.code {
                        KeyCode::Enter => {
                            app.sudo_password = Some(app.password_input.clone());
                            app.password_input.clear();
                            app.input_mode = InputMode::Normal;
                            app.apply_state = ApplyState::Applying;
                            app.apply_with_sudo();
                        }
                        KeyCode::Esc => {
                            app.password_input.clear();
                            app.input_mode = InputMode::Normal;
                            app.apply_state = ApplyState::Idle;
                        }
                        KeyCode::Backspace => {
                            app.password_input.pop();
                        }
                        KeyCode::Char(c) => {
                            app.password_input.push(c);
                        }
                        _ => {}
                    },
                    InputMode::Normal => match app.apply_state {
                        // Normal navigation and selection keys.
                        ApplyState::Idle => match key.code {
                            KeyCode::Char('q') => return Ok(()),
                            KeyCode::Tab => app.next_tab(),
                            KeyCode::BackTab => app.prev_tab(),
                            KeyCode::Down | KeyCode::Char('j') => app.next_item(),
                            KeyCode::Up | KeyCode::Char('k') => app.prev_item(),
                            KeyCode::Char(' ') => app.toggle_selected(),
                            KeyCode::Char('a') => app.select_all(),
                            KeyCode::Char('n') => app.select_none(),
                            KeyCode::Enter => app.apply(),
                            KeyCode::Char('1') => app.tab = Tab::Dotfiles,
                            KeyCode::Char('2') => app.tab = Tab::Packages,
                            KeyCode::Char('3') => app.tab = Tab::Secrets,
                            KeyCode::Char('4') => app.tab = Tab::Status,
                            _ => {}
                        },
                        ApplyState::Applying => {
                            // Can't do anything while applying
                        }
                        ApplyState::NeedsSudo => match key.code {
                            KeyCode::Char('y') | KeyCode::Enter => {
                                app.input_mode = InputMode::Password;
                            }
                            KeyCode::Char('n') | KeyCode::Esc => {
                                app.apply_state = ApplyState::Idle;
                            }
                            _ => {}
                        },
                        // Results screen: dismiss or scroll the log.
                        ApplyState::Done => match key.code {
                            KeyCode::Enter | KeyCode::Esc | KeyCode::Char('q') => app.dismiss_apply(),
                            KeyCode::Up | KeyCode::Char('k') => app.scroll_log_up(),
                            KeyCode::Down | KeyCode::Char('j') => app.scroll_log_down(),
                            _ => {}
                        },
                    },
                }
            }
        }
    }
}
/// Top-level draw: a three-row layout with the tab bar on top, a
/// mode-dependent main pane, and a context-sensitive help bar below.
fn ui(f: &mut Frame, app: &mut App) {
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([
            Constraint::Length(3),
            Constraint::Min(0),
            Constraint::Length(3),
        ])
        .split(f.area());
    let title = format!("Doot - {}", app.source_dir.display());
    let tabs = Tabs::new(vec!["Dotfiles", "Packages", "Secrets", "Status"])
        .block(Block::default().borders(Borders::ALL).title(title))
        .select(match app.tab {
            Tab::Dotfiles => 0,
            Tab::Packages => 1,
            Tab::Secrets => 2,
            Tab::Status => 3,
        })
        .style(Style::default().fg(Color::Cyan))
        .highlight_style(Style::default().fg(Color::Yellow).add_modifier(Modifier::BOLD));
    f.render_widget(tabs, chunks[0]);
    // The main pane and help text depend on input mode + apply state.
    match app.input_mode {
        InputMode::Password => {
            render_password_input(f, app, chunks[1]);
            let help = Paragraph::new("[enter] submit [esc] cancel")
                .block(Block::default().borders(Borders::ALL));
            f.render_widget(help, chunks[2]);
        }
        InputMode::Normal => match app.apply_state {
            ApplyState::Idle => {
                match app.tab {
                    Tab::Dotfiles => render_dotfiles(f, app, chunks[1]),
                    Tab::Packages => render_packages(f, app, chunks[1]),
                    Tab::Secrets => render_secrets(f, chunks[1]),
                    Tab::Status => render_status(f, app, chunks[1]),
                }
                let help = Paragraph::new("[tab] switch [j/k] navigate [space] toggle [a] all [n] none [enter] apply [q] quit")
                    .block(Block::default().borders(Borders::ALL));
                f.render_widget(help, chunks[2]);
            }
            ApplyState::NeedsSudo => {
                render_sudo_prompt(f, chunks[1]);
                let help = Paragraph::new("[y/enter] enter password [n/esc] cancel")
                    .block(Block::default().borders(Borders::ALL));
                f.render_widget(help, chunks[2]);
            }
            // Applying and Done share the progress view.
            ApplyState::Applying | ApplyState::Done => {
                render_apply_progress(f, app, chunks[1]);
                let help_text = if app.apply_state == ApplyState::Done {
                    "[enter/esc] dismiss [j/k] scroll"
                } else {
                    "Applying..."
                };
                let help = Paragraph::new(help_text)
                    .block(Block::default().borders(Borders::ALL));
                f.render_widget(help, chunks[2]);
            }
        },
    }
}
/// Draw the apply view: a progress gauge on top and a scrollable,
/// color-coded log list below it.
fn render_apply_progress(f: &mut Frame, app: &App, area: ratatui::layout::Rect) {
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([
            Constraint::Length(3),
            Constraint::Min(0),
        ])
        .split(area);
    // Progress bar
    let progress = if app.apply_total > 0 {
        (app.apply_progress as f64 / app.apply_total as f64 * 100.0) as u16
    } else {
        // Zero planned items counts as fully complete.
        100
    };
    let label = format!("{}/{}", app.apply_progress, app.apply_total);
    let gauge = Gauge::default()
        .block(Block::default().borders(Borders::ALL).title("Progress"))
        .gauge_style(Style::default().fg(Color::Green))
        .percent(progress)
        .label(label);
    f.render_widget(gauge, chunks[0]);
    // Log output
    // Window the log to the visible rows, starting at the scroll offset
    // (height minus 2 accounts for the block's borders).
    let visible_height = chunks[1].height.saturating_sub(2) as usize;
    let start = app.log_scroll;
    let end = (start + visible_height).min(app.apply_logs.len());
    let items: Vec<ListItem> = app.apply_logs[start..end]
        .iter()
        .map(|(msg, level)| {
            let color = match level {
                LogLevel::Info => Color::White,
                LogLevel::Success => Color::Green,
                LogLevel::Error => Color::Red,
            };
            ListItem::new(Line::from(Span::styled(msg.as_str(), Style::default().fg(color))))
        })
        .collect();
    let title = if app.apply_state == ApplyState::Done {
        "Complete - Press Enter to continue"
    } else {
        "Applying..."
    };
    let list = List::new(items)
        .block(Block::default().borders(Borders::ALL).title(title));
    f.render_widget(list, chunks[1]);
}
/// Draw the dotfiles tab: checkbox, source path, target path, and a
/// colored status glyph per row.
///
/// NOTE(review): several glyph literals below render as empty strings
/// here — likely non-ASCII symbols lost in extraction; verify in repo.
fn render_dotfiles(f: &mut Frame, app: &mut App, area: ratatui::layout::Rect) {
    let items: Vec<ListItem> = app
        .dotfiles
        .iter()
        .map(|d| {
            let checkbox = if d.selected { "" } else { "" };
            let status = match d.status {
                FileStatus::Synced => ("", Color::Green),
                FileStatus::Modified => ("~", Color::Yellow),
                FileStatus::Pending => ("", Color::Gray),
                FileStatus::Error => ("", Color::Red),
            };
            let line = Line::from(vec![
                Span::raw(format!("{} ", checkbox)),
                Span::raw(format!("{} ", d.source.display())),
                Span::raw(""),
                Span::raw(format!("{} ", d.target.display())),
                Span::styled(status.0, Style::default().fg(status.1)),
            ]);
            ListItem::new(line)
        })
        .collect();
    let selected_count = app.dotfiles.iter().filter(|d| d.selected).count();
    let title = format!("Dotfiles ({} selected)", selected_count);
    let list = List::new(items)
        .block(Block::default().borders(Borders::ALL).title(title))
        .highlight_style(Style::default().add_modifier(Modifier::REVERSED));
    f.render_stateful_widget(list, area, &mut app.dotfile_state);
}
/// Draw the packages tab: checkbox, package name, and a colored
/// installed/not-installed glyph per row.
///
/// NOTE(review): the glyph literals below render as empty strings here —
/// likely non-ASCII symbols lost in extraction; verify in repo.
fn render_packages(f: &mut Frame, app: &mut App, area: ratatui::layout::Rect) {
    let items: Vec<ListItem> = app
        .packages
        .iter()
        .map(|p| {
            let checkbox = if p.selected { "" } else { "" };
            let status = if p.installed {
                ("", Color::Green)
            } else {
                ("", Color::Gray)
            };
            let line = Line::from(vec![
                Span::raw(format!("{} ", checkbox)),
                Span::raw(format!("{} ", p.name)),
                Span::styled(status.0, Style::default().fg(status.1)),
            ]);
            ListItem::new(line)
        })
        .collect();
    let selected_count = app.packages.iter().filter(|p| p.selected).count();
    let title = format!("Packages ({} selected)", selected_count);
    let list = List::new(items)
        .block(Block::default().borders(Borders::ALL).title(title))
        .highlight_style(Style::default().add_modifier(Modifier::REVERSED));
    f.render_stateful_widget(list, area, &mut app.package_state);
}
/// Placeholder panel: secrets are not yet surfaced in the TUI.
fn render_secrets(f: &mut Frame, area: ratatui::layout::Rect) {
    let placeholder = Paragraph::new("No secrets configured")
        .block(Block::default().borders(Borders::ALL).title("Secrets"));
    f.render_widget(placeholder, area);
}
fn render_status(f: &mut Frame, app: &App, area: ratatui::layout::Rect) {
let synced = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Synced)).count();
let pending = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Pending)).count();
let modified = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Modified)).count();
let errors = app.dotfiles.iter().filter(|d| matches!(d.status, FileStatus::Error)).count();
let installed = app.packages.iter().filter(|p| p.installed).count();
let text = format!(
"Source: {}\n\nDotfiles:\n Synced: {}\n Pending: {}\n Modified: {}\n Errors: {}\n\nPackages:\n Installed: {}/{}",
app.source_dir.display(),
synced, pending, modified, errors, installed, app.packages.len()
);
let paragraph = Paragraph::new(text)
.block(Block::default().borders(Borders::ALL).title("Status"));
f.render_widget(paragraph, area);
}
/// Modal asking whether to proceed to the sudo password prompt.
fn render_sudo_prompt(f: &mut Frame, area: ratatui::layout::Rect) {
    let message =
        "Package installation requires sudo privileges.\n\nDo you want to enter your password?";
    let prompt = Paragraph::new(message)
        .block(Block::default().borders(Borders::ALL).title("Sudo Required"));
    f.render_widget(prompt, area);
}
/// Password entry line; input is masked with one '*' per typed
/// character and a trailing '_' cursor.
fn render_password_input(f: &mut Frame, app: &App, area: ratatui::layout::Rect) {
    let mut text = String::from("Password: ");
    for _ in 0..app.password_input.len() {
        text.push('*');
    }
    text.push('_');
    let paragraph = Paragraph::new(text)
        .block(Block::default().borders(Borders::ALL).title("Enter sudo password"));
    f.render_widget(paragraph, area);
}
/// Copy `source` to `target`, creating the target's parent directory
/// first. Directories are copied recursively; plain files via
/// `fs::copy`. Errors are stringified for the TUI log.
fn copy_file(source: &PathBuf, target: &PathBuf) -> Result<(), String> {
    if let Some(parent) = target.parent() {
        std::fs::create_dir_all(parent).map_err(|e| e.to_string())?;
    }
    if !source.is_dir() {
        return std::fs::copy(source, target)
            .map(|_| ())
            .map_err(|e| e.to_string());
    }
    copy_dir_recursive(source, target).map_err(|e| e.to_string())
}
/// Recursively copy the directory tree at `src` into `dst`.
///
/// `dst` (and missing ancestors) are created first; files are copied
/// with `fs::copy`, subdirectories recurse. Takes `&Path` instead of
/// `&PathBuf` (callers keep working via deref coercion), which also
/// removes the redundant `src_path.into()` re-conversion the old
/// signature forced on the recursive call.
///
/// # Errors
/// Returns the first I/O error encountered.
fn copy_dir_recursive(src: &std::path::Path, dst: &std::path::Path) -> std::io::Result<()> {
    std::fs::create_dir_all(dst)?;
    for entry in std::fs::read_dir(src)? {
        let entry = entry?;
        let ty = entry.file_type()?;
        let src_path = entry.path();
        let dst_path = dst.join(entry.file_name());
        if ty.is_dir() {
            copy_dir_recursive(&src_path, &dst_path)?;
        } else {
            std::fs::copy(&src_path, &dst_path)?;
        }
    }
    Ok(())
}

135
crates/doot-cli/src/main.rs Normal file
View file

@ -0,0 +1,135 @@
mod commands;
use clap::{Parser, Subcommand};
use std::path::PathBuf;
#[derive(Parser)]
#[command(name = "doot")]
#[command(about = "A modern dotfiles manager with a typed DSL", long_about = None)]
// Top-level CLI. NOTE: `//` comments are used deliberately — `///` doc
// comments on clap items would become user-visible help text.
struct Cli {
    #[command(subcommand)]
    command: Commands,
    // Global verbosity flag, available on every subcommand.
    #[arg(short, long, global = true)]
    verbose: bool,
    // Optional explicit config file path (-C/--config), global.
    #[arg(short = 'C', long, global = true)]
    config: Option<PathBuf>,
}
// Subcommands of the `doot` binary. NOTE: `//` comments are used on
// purpose — `///` would be picked up by clap as user-visible help text,
// so only the pre-existing `///` lines (already help text) are kept.
#[derive(Subcommand)]
enum Commands {
    // Scaffold a new dotfiles repository.
    Init {
        /// Source directory for dotfiles (default: ~/.config/doot)
        path: Option<PathBuf>,
    },
    // Deploy dotfiles/packages from the config.
    Apply {
        #[arg(short = 'n', long)]
        dry_run: bool,
        #[arg(short, long)]
        parallel: bool,
    },
    // Show differences between sources and deployed targets.
    Diff {
        #[arg(short, long)]
        all: bool,
    },
    Status,
    Check,
    // Format the config file; `--check` only reports without rewriting.
    Fmt {
        #[arg(short, long)]
        check: bool,
    },
    // Restore a snapshot; no argument lists what is available.
    Rollback {
        snapshot: Option<String>,
    },
    // Save the current state under `name`.
    Snapshot {
        name: String,
    },
    Encrypt {
        file: PathBuf,
        #[arg(short, long)]
        recipient: Option<String>,
    },
    Decrypt {
        file: PathBuf,
        #[arg(short, long)]
        identity: Option<PathBuf>,
    },
    // Package management; see `PackageAction`.
    Package {
        #[command(subcommand)]
        action: PackageAction,
    },
    Lsp,
    Tui,
    /// Open source file in editor for a deployed target
    Edit {
        /// Target path or dotfile name (e.g., ~/.config/nvim or nvim)
        target: String,
        /// Apply changes after editing
        #[arg(short, long)]
        apply: bool,
        /// Skip confirmation prompt
        #[arg(short = 'y', long)]
        yes: bool,
    },
}
// Subcommands of `doot package` (`//` comments: avoid clap help text).
#[derive(Subcommand)]
enum PackageAction {
    Install,
    Update,
    List,
}
/// CLI entry point: parse arguments and dispatch to the matching
/// command handler, forwarding the global `--config`/`--verbose` flags.
fn main() -> anyhow::Result<()> {
    let cli = Cli::parse();
    match cli.command {
        Commands::Init { path } => commands::init::run(path, cli.verbose),
        Commands::Apply { dry_run, parallel } => {
            commands::apply::run(cli.config, dry_run, parallel, cli.verbose)
        }
        Commands::Diff { all } => commands::diff::run(cli.config, all, cli.verbose),
        Commands::Status => commands::status::run(cli.config, cli.verbose),
        Commands::Check => commands::check::run(cli.config, cli.verbose),
        Commands::Fmt { check } => commands::fmt::run(cli.config, check, cli.verbose),
        Commands::Rollback { snapshot } => {
            commands::rollback::run(cli.config, snapshot, cli.verbose)
        }
        Commands::Snapshot { name } => commands::snapshot::run(cli.config, name, cli.verbose),
        Commands::Encrypt { file, recipient } => {
            commands::encrypt::run(file, recipient, cli.verbose)
        }
        Commands::Decrypt { file, identity } => {
            commands::decrypt::run(file, identity, cli.verbose)
        }
        Commands::Package { action } => match action {
            PackageAction::Install => commands::package::install(cli.config, cli.verbose),
            PackageAction::Update => commands::package::update(cli.verbose),
            PackageAction::List => commands::package::list(cli.config, cli.verbose),
        },
        Commands::Lsp => commands::lsp::run(),
        Commands::Tui => commands::tui::run(cli.config),
        Commands::Edit { target, apply, yes } => {
            commands::edit::run(cli.config, target, apply, yes, cli.verbose)
        }
    }
}

View file

@ -0,0 +1,281 @@
use std::path::PathBuf;
use std::process::Command;
// Per-test sandbox: an isolated temp directory used as a fake $HOME
// (via DOOT_HOME) so integration tests never touch the real home dir.
struct Sandbox {
    // Root of the sandbox; removed on drop.
    path: PathBuf,
}
impl Sandbox {
    // Creates a fresh sandbox directory for one named test, wiping any
    // leftover directory from a previous run of the same test.
    fn new(name: &str) -> Self {
        let root = std::env::temp_dir().join(format!("doot-test-{}", name));
        if root.exists() {
            std::fs::remove_dir_all(&root).unwrap();
        }
        std::fs::create_dir_all(&root).unwrap();
        Self { path: root }
    }
    // Runs the compiled `doot` binary with its home redirected into the
    // sandbox, returning the raw process output.
    fn run(&self, args: &[&str]) -> std::process::Output {
        Command::new(env!("CARGO_BIN_EXE_doot"))
            .args(args)
            .env("DOOT_HOME", &self.path)
            .env("DOOT_TEST_MODE", "1")
            .output()
            .expect("failed to run doot")
    }
    // Sandboxed equivalent of ~/.config/doot.
    fn config_dir(&self) -> PathBuf {
        self.path.join(".config/doot")
    }
    // Sandboxed equivalent of ~/.local/state/doot.
    fn state_dir(&self) -> PathBuf {
        self.path.join(".local/state/doot")
    }
    // Path of the main doot configuration file.
    fn config_file(&self) -> PathBuf {
        self.config_dir().join("doot.doot")
    }
    // Writes the main configuration file, creating the config dir first.
    fn write_config(&self, content: &str) {
        std::fs::create_dir_all(self.config_dir()).unwrap();
        std::fs::write(self.config_file(), content).unwrap();
    }
    // Writes a dotfile source under the config dir, creating parents.
    fn write_source(&self, path: &str, content: &str) {
        let destination = self.config_dir().join(path);
        if let Some(parent) = destination.parent() {
            std::fs::create_dir_all(parent).unwrap();
        }
        std::fs::write(destination, content).unwrap();
    }
    // True when `path` is itself a symlink (not followed).
    fn is_symlink(&self, path: &PathBuf) -> bool {
        path.is_symlink()
    }
    // Link destination of `path`, if it is a readable symlink.
    fn symlink_target(&self, path: &PathBuf) -> Option<PathBuf> {
        std::fs::read_link(path).ok()
    }
}
// Best-effort cleanup of the sandbox directory; errors are ignored so a
// failed removal never masks the test result.
impl Drop for Sandbox {
    fn drop(&mut self) {
        let _ = std::fs::remove_dir_all(&self.path);
    }
}
// `doot init` must scaffold the config file plus the config, backups and
// snapshots directories inside the sandboxed home.
#[test]
fn test_init_creates_structure() {
    let sandbox = Sandbox::new("init");
    let output = sandbox.run(&["init"]);
    assert!(output.status.success(), "init failed: {:?}", output);
    assert!(sandbox.config_file().exists(), "config file not created");
    assert!(sandbox.config_dir().join("config").exists(), "config dir not created");
    assert!(sandbox.state_dir().join("backups").exists(), "backups dir not created");
    assert!(sandbox.state_dir().join("snapshots").exists(), "snapshots dir not created");
}
// `doot check` must accept a syntactically valid config.
#[test]
fn test_check_valid_config() {
    let sandbox = Sandbox::new("check-valid");
    sandbox.write_config(r#"
package: "ripgrep"
package: "fd"
"#);
    let output = sandbox.run(&["check"]);
    assert!(output.status.success(), "check failed: {:?}", output);
}
// `doot apply -n` (dry run) must succeed without writing any target file.
#[test]
fn test_apply_dry_run() {
    let sandbox = Sandbox::new("apply-dry");
    sandbox.write_config(r#"
dotfile:
  source = "config/test.conf"
  target = "~/.config/test/test.conf"
"#);
    sandbox.write_source("config/test.conf", "test content");
    let output = sandbox.run(&["apply", "-n"]);
    assert!(output.status.success(), "apply -n failed: {:?}", output);
    let target = sandbox.path.join(".config/test/test.conf");
    assert!(!target.exists(), "dry run should not create files");
}
// With `deploy = "link"`, apply must create a symlink whose destination
// is the source file inside the config dir.
#[test]
fn test_apply_creates_symlink() {
    let sandbox = Sandbox::new("apply-symlink");
    sandbox.write_config(r#"
dotfile:
  source = "config/app.conf"
  target = "~/.config/app/app.conf"
  deploy = "link"
"#);
    sandbox.write_source("config/app.conf", "app config content");
    let output = sandbox.run(&["apply"]);
    assert!(output.status.success(), "apply failed: {:?}", output);
    let target = sandbox.path.join(".config/app/app.conf");
    assert!(sandbox.is_symlink(&target), "target should be symlink");
    let expected_source = sandbox.config_dir().join("config/app.conf");
    assert_eq!(
        sandbox.symlink_target(&target),
        Some(expected_source),
        "symlink should point to source"
    );
}
// Applying twice in link mode must be idempotent: the second run succeeds
// and the symlink is still in place.
#[test]
fn test_apply_unchanged_on_rerun() {
    let sandbox = Sandbox::new("apply-unchanged");
    sandbox.write_config(
        "dotfile:\n  source = \"config/app.conf\"\n  target = \"~/.config/app/app.conf\"\n  deploy = \"link\"\n",
    );
    sandbox.write_source("config/app.conf", "content");
    let first = sandbox.run(&["apply"]);
    assert!(first.status.success(), "first apply failed");
    let second = sandbox.run(&["apply"]);
    assert!(second.status.success(), "second apply failed");
    // Second apply should succeed (symlink already exists and points correctly)
    let target = sandbox.path.join(".config/app/app.conf");
    assert!(target.is_symlink(), "target should still be symlink after second apply");
}
// Without an explicit deploy mode, apply must copy the file (default mode),
// producing a regular file with the source's content.
#[test]
fn test_apply_creates_copy() {
    let sandbox = Sandbox::new("apply-copy");
    sandbox.write_config(r#"
dotfile:
  source = "config/app.conf"
  target = "~/.config/app/app.conf"
"#);
    sandbox.write_source("config/app.conf", "app config content");
    let output = sandbox.run(&["apply"]);
    assert!(output.status.success(), "apply failed: {:?}", output);
    let target = sandbox.path.join(".config/app/app.conf");
    assert!(target.exists(), "target should exist");
    assert!(!target.is_symlink(), "target should be a copy, not a symlink");
    let content = std::fs::read_to_string(&target).unwrap();
    assert_eq!(content, "app config content", "content should match source");
}
// Applying twice in copy mode must be idempotent: the second run succeeds
// and the target remains a regular (non-symlink) file.
#[test]
fn test_apply_copy_unchanged_on_rerun() {
    let sandbox = Sandbox::new("apply-copy-unchanged");
    sandbox.write_config(
        "dotfile:\n  source = \"config/app.conf\"\n  target = \"~/.config/app/app.conf\"\n",
    );
    sandbox.write_source("config/app.conf", "content");
    let first = sandbox.run(&["apply"]);
    assert!(first.status.success(), "first apply failed");
    let second = sandbox.run(&["apply"]);
    assert!(second.status.success(), "second apply failed");
    let target = sandbox.path.join(".config/app/app.conf");
    assert!(target.exists(), "target should exist after second apply");
    assert!(!target.is_symlink(), "target should still be a copy");
}
// `doot status` must exit successfully after a deployment.
// NOTE(review): only the exit code is checked; stdout content is not
// asserted — consider pinning expected output.
#[test]
fn test_status_shows_state() {
    let sandbox = Sandbox::new("status");
    sandbox.write_config(r#"
dotfile:
  source = "config/app.conf"
  target = "~/.config/app/app.conf"
"#);
    sandbox.write_source("config/app.conf", "content");
    sandbox.run(&["apply"]);
    let output = sandbox.run(&["status"]);
    assert!(output.status.success(), "status failed: {:?}", output);
}
// `doot snapshot <name>` must create <state>/snapshots/<name>.json.
// NOTE(review): despite the name, only the snapshot half is exercised;
// `rollback` is never invoked here — confirm whether a rollback assertion
// was intended.
#[test]
fn test_snapshot_and_rollback() {
    let sandbox = Sandbox::new("snapshot");
    sandbox.write_config(r#"
dotfile:
  source = "config/app.conf"
  target = "~/.config/app/app.conf"
"#);
    sandbox.write_source("config/app.conf", "v1");
    sandbox.run(&["apply"]);
    let snap_output = sandbox.run(&["snapshot", "v1"]);
    assert!(snap_output.status.success(), "snapshot failed: {:?}", snap_output);
    let snapshot_file = sandbox.state_dir().join("snapshots/v1.json");
    assert!(snapshot_file.exists(), "snapshot file not created");
}
// A dotfile guarded by `when = true` must still be deployed.
#[test]
fn test_dotfile_with_when_condition() {
    let sandbox = Sandbox::new("conditional");
    // Test that 'when' condition works - only deploy if condition is true
    let config = r#"dotfile:
  source = "config/test.conf"
  target = "~/.config/test.conf"
  when = true
"#;
    sandbox.write_config(config);
    sandbox.write_source("config/test.conf", "test content");
    let output = sandbox.run(&["apply"]);
    assert!(output.status.success(), "apply failed: {:?}", output);
    let target = sandbox.path.join(".config/test.conf");
    assert!(target.exists(), "file should be deployed when condition is true");
}
// A dotfile guarded by `when = false` must be skipped (apply still succeeds).
#[test]
fn test_dotfile_when_false_skips() {
    let sandbox = Sandbox::new("when-false");
    let config = r#"dotfile:
  source = "config/skip.conf"
  target = "~/.config/skip.conf"
  when = false
"#;
    sandbox.write_config(config);
    sandbox.write_source("config/skip.conf", "should not deploy");
    let output = sandbox.run(&["apply"]);
    assert!(output.status.success(), "apply failed: {:?}", output);
    let target = sandbox.path.join(".config/skip.conf");
    assert!(!target.exists(), "file should NOT be deployed when condition is false");
}
// `doot diff` must exit successfully when the deployed target's content
// diverges from the source.
// NOTE(review): the diff output itself is not asserted, only the exit code.
#[test]
fn test_diff_shows_changes() {
    let sandbox = Sandbox::new("diff");
    sandbox.write_config(r#"
dotfile:
  source = "config/app.conf"
  target = "~/.config/app/app.conf"
"#);
    sandbox.write_source("config/app.conf", "new content");
    let target_dir = sandbox.path.join(".config/app");
    std::fs::create_dir_all(&target_dir).unwrap();
    std::fs::write(target_dir.join("app.conf"), "old content").unwrap();
    let output = sandbox.run(&["diff"]);
    assert!(output.status.success(), "diff failed: {:?}", output);
}

View file

@ -0,0 +1,24 @@
[package]
name = "doot-core"
version.workspace = true
edition.workspace = true
[dependencies]
doot-lang.workspace = true
serde.workspace = true
serde_json.workspace = true
toml.workspace = true
smol.workspace = true
async-fs.workspace = true
age.workspace = true
walkdir.workspace = true
dirs.workspace = true
similar.workspace = true
blake3.workspace = true
os_info.workspace = true
indicatif.workspace = true
thiserror.workspace = true
anyhow.workspace = true
hostname = "0.4"
regex-lite = "0.1"
glob = "0.3"

View file

@ -0,0 +1,122 @@
//! Configuration for doot operations.
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
/// Doot runtime configuration.
///
/// Construct via [`Config::new`] or [`Config::default`]; the builder-style
/// setters (`dry_run`, `verbose`, `parallel`) refine the flags afterwards.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Directory containing dotfile sources.
    pub source_dir: PathBuf,
    /// Doot configuration directory.
    pub config_dir: PathBuf,
    /// State and data directory.
    pub state_dir: PathBuf,
    /// Path to the state file (`state.json` under `state_dir` by default).
    pub state_file: PathBuf,
    /// Directory for file backups (under `state_dir` by default).
    pub backup_dir: PathBuf,
    /// Directory for snapshots (under `state_dir` by default).
    pub snapshot_dir: PathBuf,
    /// Path to age identity file (under `config_dir` by default).
    pub identity_file: PathBuf,
    /// Simulate actions without writing.
    pub dry_run: bool,
    /// Enable verbose output.
    pub verbose: bool,
    /// Enable parallel operations.
    pub parallel: bool,
}
impl Config {
    /// Creates a new config rooted at the given source directory; all
    /// state paths are derived from the default config/state locations.
    pub fn new(source_dir: PathBuf) -> Self {
        let config_dir = Self::default_config_dir();
        let state_dir = Self::default_state_dir();
        let state_file = state_dir.join("state.json");
        let backup_dir = state_dir.join("backups");
        let snapshot_dir = state_dir.join("snapshots");
        let identity_file = config_dir.join("identity.txt");
        Self {
            source_dir,
            config_dir,
            state_dir,
            state_file,
            backup_dir,
            snapshot_dir,
            identity_file,
            dry_run: false,
            verbose: false,
            parallel: true,
        }
    }
    /// Returns DOOT_HOME if set, otherwise the real home directory.
    /// DOOT_HOME exists to support sandboxed testing.
    pub fn home_dir() -> PathBuf {
        match std::env::var("DOOT_HOME") {
            Ok(home) => PathBuf::from(home),
            Err(_) => dirs::home_dir().unwrap_or_default(),
        }
    }
    /// Returns the default configuration directory
    /// (`$DOOT_HOME/.config/doot` when sandboxed).
    pub fn default_config_dir() -> PathBuf {
        match std::env::var("DOOT_HOME") {
            Ok(doot_home) => PathBuf::from(doot_home).join(".config/doot"),
            Err(_) => dirs::config_dir()
                .unwrap_or_else(|| Self::home_dir().join(".config"))
                .join("doot"),
        }
    }
    /// Returns the default state directory
    /// (`$DOOT_HOME/.local/state/doot` when sandboxed).
    pub fn default_state_dir() -> PathBuf {
        match std::env::var("DOOT_HOME") {
            Ok(doot_home) => PathBuf::from(doot_home).join(".local/state/doot"),
            Err(_) => dirs::state_dir()
                .or_else(dirs::data_local_dir)
                .unwrap_or_else(|| Self::home_dir().join(".local/state"))
                .join("doot"),
        }
    }
    /// Returns the default source directory (same as the config dir).
    pub fn default_source_dir() -> PathBuf {
        Self::default_config_dir()
    }
    /// Returns the default config file path (`doot.doot` in the config dir).
    pub fn default_config_file() -> PathBuf {
        Self::default_config_dir().join("doot.doot")
    }
    /// Builder-style setter for dry-run mode.
    pub fn dry_run(mut self, dry_run: bool) -> Self {
        self.dry_run = dry_run;
        self
    }
    /// Builder-style setter for verbose mode.
    pub fn verbose(mut self, verbose: bool) -> Self {
        self.verbose = verbose;
        self
    }
    /// Builder-style setter for parallel mode.
    pub fn parallel(mut self, parallel: bool) -> Self {
        self.parallel = parallel;
        self
    }
    /// Creates all required directories (config, state, backups, snapshots).
    pub fn ensure_dirs(&self) -> std::io::Result<()> {
        for dir in [
            &self.config_dir,
            &self.state_dir,
            &self.backup_dir,
            &self.snapshot_dir,
        ] {
            std::fs::create_dir_all(dir)?;
        }
        Ok(())
    }
}
// Default config: sources live in the default source (= config) directory,
// all flags at their `Config::new` defaults.
impl Default for Config {
    fn default() -> Self {
        Self::new(Self::default_source_dir())
    }
}

View file

@ -0,0 +1,72 @@
//! File diff utilities.
use similar::{ChangeTag, TextDiff};
use std::path::PathBuf;
/// Compares files and displays differences.
/// Stateless: all functionality is exposed as associated functions.
pub struct DiffDisplay;
impl DiffDisplay {
    /// Diffs two files and returns a formatted string.
    ///
    /// A missing target is treated as empty, so the entire source shows
    /// up as one-sided changes.
    ///
    /// # Errors
    /// Returns any I/O error from reading either file (both must be UTF-8).
    pub fn diff_files(
        source: &std::path::Path,
        target: &std::path::Path,
    ) -> Result<String, std::io::Error> {
        let source_content = std::fs::read_to_string(source)?;
        let target_content = if target.exists() {
            std::fs::read_to_string(target)?
        } else {
            String::new()
        };
        // NOTE(review): here the diff direction is source -> target, while
        // `unified_diff` below diffs target -> source — confirm intended.
        Ok(Self::diff_strings(&source_content, &target_content))
    }
    /// Diffs two strings line by line, prefixing each line with
    /// `-` (delete), `+` (insert) or a space (equal).
    pub fn diff_strings(old: &str, new: &str) -> String {
        let diff = TextDiff::from_lines(old, new);
        let mut output = String::new();
        for change in diff.iter_all_changes() {
            let sign = match change.tag() {
                ChangeTag::Delete => "-",
                ChangeTag::Insert => "+",
                ChangeTag::Equal => " ",
            };
            output.push_str(&format!("{}{}", sign, change));
        }
        output
    }
    /// Checks if deploying `source` would change `target`.
    ///
    /// - Missing target: always reports a change.
    /// - Symlinked target: reports a change when the link points elsewhere.
    /// - Regular file: compares raw bytes, so non-UTF-8 (binary) files are
    ///   handled instead of erroring out of `read_to_string`.
    ///
    /// # Errors
    /// Returns any I/O error from reading the link or either file.
    pub fn has_changes(
        source: &std::path::Path,
        target: &std::path::Path,
    ) -> Result<bool, std::io::Error> {
        if !target.exists() {
            return Ok(true);
        }
        if target.is_symlink() {
            let linked = std::fs::read_link(target)?;
            return Ok(linked.as_path() != source);
        }
        // Byte comparison is binary-safe where read_to_string is not.
        let source_content = std::fs::read(source)?;
        let target_content = std::fs::read(target)?;
        Ok(source_content != target_content)
    }
    /// Returns a unified-diff (patch-style) rendering of target -> source,
    /// headed with both file paths. A missing target is treated as empty.
    ///
    /// # Errors
    /// Returns any I/O error from reading either file (both must be UTF-8).
    pub fn unified_diff(
        source: &std::path::Path,
        target: &std::path::Path,
    ) -> Result<String, std::io::Error> {
        let source_content = std::fs::read_to_string(source)?;
        let target_content = if target.exists() {
            std::fs::read_to_string(target)?
        } else {
            String::new()
        };
        let diff = TextDiff::from_lines(&target_content, &source_content);
        let mut unified = diff.unified_diff();
        Ok(unified
            .header(&target.display().to_string(), &source.display().to_string())
            .to_string())
    }
}

View file

@ -0,0 +1,80 @@
//! Symlink management.
use super::{DeployAction, DeployError};
use crate::config::Config;
use std::path::PathBuf;
/// Creates and manages symlinks.
pub struct Linker {
    // Runtime config; only `dry_run` is consulted by the link operations.
    config: Config,
}
impl Linker {
    /// Creates a new linker.
    pub fn new(config: Config) -> Self {
        Self { config }
    }
    /// Creates a symlink from `source` to `target`.
    ///
    /// Returns `Unchanged` when a correct link is already in place,
    /// `Updated` when a stale link pointing elsewhere was replaced, and
    /// `Created` otherwise. In dry-run mode nothing is written, but the
    /// action that *would* be taken is still reported.
    ///
    /// # Errors
    /// Fails when the existing link cannot be read/removed, the parent
    /// directory cannot be created, or the symlink syscall fails.
    pub fn link(
        &self,
        source: &std::path::Path,
        target: &std::path::Path,
    ) -> Result<DeployAction, DeployError> {
        let mut replaced_stale_link = false;
        if target.is_symlink() {
            let current_target = std::fs::read_link(target)?;
            if current_target.as_path() == source {
                return Ok(DeployAction::Unchanged);
            }
            // The link points somewhere else: replace it.
            replaced_stale_link = true;
            if !self.config.dry_run {
                std::fs::remove_file(target)?;
            }
        }
        if !self.config.dry_run {
            if let Some(parent) = target.parent() {
                std::fs::create_dir_all(parent)?;
            }
            #[cfg(unix)]
            std::os::unix::fs::symlink(source, target).map_err(|e| DeployError::SymlinkFailed {
                source_display: source.display().to_string(),
                target_display: target.display().to_string(),
                message: e.to_string(),
            })?;
            #[cfg(windows)]
            {
                // Windows distinguishes file and directory symlinks.
                if source.is_dir() {
                    std::os::windows::fs::symlink_dir(source, target)
                } else {
                    std::os::windows::fs::symlink_file(source, target)
                }
                .map_err(|e| DeployError::SymlinkFailed {
                    source_display: source.display().to_string(),
                    target_display: target.display().to_string(),
                    message: e.to_string(),
                })?;
            }
        }
        // Fix: report `Updated` (not `Created`) when an old link was replaced.
        Ok(if replaced_stale_link {
            DeployAction::Updated
        } else {
            DeployAction::Created
        })
    }
    /// Removes `target` if it is a symlink. No-op in dry-run mode or when
    /// `target` is not a symlink.
    pub fn unlink(&self, target: &std::path::Path) -> Result<(), DeployError> {
        if target.is_symlink() && !self.config.dry_run {
            std::fs::remove_file(target)?;
        }
        Ok(())
    }
    /// Checks if `target` is a symlink whose destination equals `source`.
    /// Unreadable links count as not linked.
    pub fn is_linked(&self, source: &std::path::Path, target: &std::path::Path) -> bool {
        if !target.is_symlink() {
            return false;
        }
        match std::fs::read_link(target) {
            Ok(current) => current.as_path() == source,
            Err(_) => false,
        }
    }
}

View file

@ -0,0 +1,589 @@
//! Dotfile deployment system.
pub mod diff;
pub mod linker;
pub mod template;
use crate::config::Config;
use crate::state::store::DeployMode;
use crate::state::StateStore;
use doot_lang::evaluator::DotfileConfig;
use glob::Pattern;
use std::path::{Path, PathBuf};
use thiserror::Error;
pub use diff::DiffDisplay;
pub use linker::Linker;
pub use template::TemplateEngine;
/// Deployment errors.
#[derive(Error, Debug)]
pub enum DeployError {
    /// The configured source file/dir does not exist on disk.
    #[error("source not found: {}", .0.display())]
    SourceNotFound(PathBuf),
    /// Target path is occupied by a regular file/dir (not a managed link).
    #[error("target exists and is not a symlink: {}", .0.display())]
    TargetExists(PathBuf),
    /// Symlink syscall failed; paths are kept as display strings so the
    /// error stays `Send`/`'static` friendly.
    #[error("failed to create symlink: {source_display} -> {target_display}: {message}")]
    SymlinkFailed {
        source_display: String,
        target_display: String,
        message: String,
    },
    /// Template rendering failed (e.g. unresolved variables).
    #[error("template error: {0}")]
    TemplateError(String),
    /// Any underlying filesystem error.
    #[error("io error: {0}")]
    IoError(#[from] std::io::Error),
    /// Copying the pre-existing target aside failed.
    #[error("backup failed: {0}")]
    BackupFailed(String),
    /// Persisting or reading deployment state failed.
    #[error("state error: {0}")]
    StateError(#[from] crate::state::store::StateError),
    /// Target resolves outside $HOME while sandboxing is enabled.
    #[error("sandbox violation: target '{}' is outside home directory. Set 'sandbox = false' to allow.", .0.display())]
    SandboxViolation(PathBuf),
    /// `chown` (with or without sudo) failed for the given path.
    #[error("chown failed for '{}': {}", .0.display(), .1)]
    ChownFailed(PathBuf, String),
}
/// Result of a deployment operation.
#[derive(Debug, Clone)]
pub struct DeployResult {
    /// Files that were created, updated, or found already in sync.
    pub deployed: Vec<DeployedFile>,
    /// Files intentionally not deployed (e.g. target occupied).
    pub skipped: Vec<SkippedFile>,
    /// Files whose deployment failed with an error.
    pub errors: Vec<DeployErrorInfo>,
}
/// Successfully deployed file info.
#[derive(Debug, Clone)]
pub struct DeployedFile {
    /// Absolute path of the source that was deployed.
    pub source: PathBuf,
    /// Path the file was deployed to.
    pub target: PathBuf,
    /// What the deployment actually did (created/updated/no-op).
    pub action: DeployAction,
}
/// Action taken during deployment.
///
/// Derives `PartialEq`/`Eq` so callers can compare actions directly
/// (mirroring how the sibling `SyncStatus` is compared with `==`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DeployAction {
    /// Target did not exist and was created.
    Created,
    /// Target existed and was replaced with new content/link.
    Updated,
    /// Target was already in sync; nothing was written.
    Unchanged,
}
/// Skipped file with reason.
#[derive(Debug, Clone)]
pub struct SkippedFile {
    /// Source that would have been deployed.
    pub source: PathBuf,
    /// Target that was left untouched.
    pub target: PathBuf,
    /// Human-readable explanation for the skip.
    pub reason: String,
}
/// Deployment error info.
#[derive(Debug, Clone)]
pub struct DeployErrorInfo {
    /// Source whose deployment failed.
    pub source: PathBuf,
    /// Target of the failed deployment.
    pub target: PathBuf,
    /// Stringified error (kept as `String` so the struct stays `Clone`).
    pub error: String,
}
/// Handles dotfile deployment.
pub struct Deployer {
    // Runtime configuration (dry-run flag, source/state directories).
    config: Config,
    // Creates/replaces symlinks for link-mode dotfiles.
    linker: Linker,
    // Renders template-mode dotfiles before writing.
    template_engine: TemplateEngine,
    // Records what was deployed where; saved after each deploy() run.
    state: StateStore,
    // When true, targets outside $HOME are rejected.
    sandbox: bool,
}
impl Deployer {
    /// Creates a new deployer backed by the state store at
    /// `config.state_file`.
    pub fn new(config: Config, sandbox: bool) -> Self {
        let state = StateStore::new(&config.state_file);
        Self {
            linker: Linker::new(config.clone()),
            template_engine: TemplateEngine::new(),
            state,
            config,
            sandbox,
        }
    }
    /// Rejects targets outside the home directory when sandboxing is on.
    ///
    /// NOTE(review): when `target` does not exist yet, `canonicalize`
    /// fails and the literal path is checked instead, so a not-yet-created
    /// path containing `..` could escape the check — confirm whether that
    /// matters for this threat model.
    fn check_sandbox(&self, target: &Path) -> Result<(), DeployError> {
        if !self.sandbox {
            return Ok(());
        }
        let home = crate::config::Config::home_dir();
        let target_canonical = target
            .canonicalize()
            .unwrap_or_else(|_| target.to_path_buf());
        if !target_canonical.starts_with(&home) {
            return Err(DeployError::SandboxViolation(target.to_path_buf()));
        }
        Ok(())
    }
    /// Deploys all dotfiles, collecting per-file outcomes instead of
    /// aborting on the first failure, then persists the state store.
    ///
    /// # Errors
    /// Only a state-save failure aborts; per-file errors are reported in
    /// the returned [`DeployResult`].
    pub fn deploy(&mut self, dotfiles: &[DotfileConfig]) -> Result<DeployResult, DeployError> {
        let mut result = DeployResult {
            deployed: Vec::new(),
            skipped: Vec::new(),
            errors: Vec::new(),
        };
        for dotfile in dotfiles {
            match self.deploy_single(dotfile) {
                Ok(deployed) => result.deployed.push(deployed),
                // An occupied, non-symlink target is a skip, not a failure.
                Err(DeployError::TargetExists(path)) => {
                    result.skipped.push(SkippedFile {
                        source: dotfile.source.clone(),
                        target: dotfile.target.clone(),
                        reason: format!("target exists: {}", path.display()),
                    });
                }
                Err(e) => {
                    result.errors.push(DeployErrorInfo {
                        source: dotfile.source.clone(),
                        target: dotfile.target.clone(),
                        error: e.to_string(),
                    });
                }
            }
        }
        self.state.save()?;
        Ok(result)
    }
    /// Deploys one dotfile: validates sandbox/source, backs up a
    /// pre-existing regular-file target, then links/copies/renders it
    /// according to the resolved deploy mode and records it in state.
    fn deploy_single(&mut self, dotfile: &DotfileConfig) -> Result<DeployedFile, DeployError> {
        let source = self.config.source_dir.join(&dotfile.source);
        let target = &dotfile.target;
        // Check sandbox before any file operations
        self.check_sandbox(target)?;
        if !source.exists() {
            return Err(DeployError::SourceNotFound(source));
        }
        let deploy_mode = self.resolve_deploy_mode(dotfile, &source);
        // Handle directories specially for copy mode - only sync changed files
        if source.is_dir() && deploy_mode == DeployMode::Copy {
            return self.deploy_directory(dotfile, &source, target, deploy_mode);
        }
        // For files or link mode: move a pre-existing regular file out of
        // the way (backed up first) so the link/copy can proceed.
        if target.exists() && !target.is_symlink() {
            if !self.config.dry_run {
                self.backup_existing(target)?;
                std::fs::remove_file(target)?;
            }
        }
        let action = if dotfile.template {
            self.deploy_template(&source, target)?
        } else {
            match deploy_mode {
                DeployMode::Link => self.linker.link(&source, target)?,
                DeployMode::Copy => self.copy_single_file(&source, target)?,
            }
        };
        // Set permissions if specified (only for copy mode, symlinks inherit from source)
        if !dotfile.permissions.is_empty() && deploy_mode == DeployMode::Copy && !self.config.dry_run {
            apply_permissions(target, &dotfile.permissions)?;
        }
        // Set owner if specified
        if let Some(ref owner) = dotfile.owner {
            if !self.config.dry_run {
                set_owner(target, owner)?;
            }
        }
        self.state.record_deployment(&source, target, deploy_mode);
        Ok(DeployedFile {
            source: source.clone(),
            target: target.clone(),
            action,
        })
    }
    /// Copy-mode directory sync: walks the per-file sync status and only
    /// touches files that actually changed, mirroring deletions from the
    /// source and preserving local-only edits in the target.
    fn deploy_directory(
        &mut self,
        dotfile: &DotfileConfig,
        source: &Path,
        target: &Path,
        deploy_mode: DeployMode,
    ) -> Result<DeployedFile, DeployError> {
        use crate::state::SyncStatus;
        let changed_files = self.state.get_changed_files_in_dir(source, target);
        if changed_files.is_empty() {
            return Ok(DeployedFile {
                source: source.to_path_buf(),
                target: target.to_path_buf(),
                action: DeployAction::Unchanged,
            });
        }
        let mut any_updated = false;
        let mut any_created = false;
        for (src_file, tgt_file, status) in changed_files {
            match status {
                SyncStatus::NotDeployed
                | SyncStatus::TargetMissing
                | SyncStatus::SourceChanged => {
                    // Copy from source to target
                    if !self.config.dry_run {
                        if let Some(parent) = tgt_file.parent() {
                            std::fs::create_dir_all(parent)?;
                        }
                        std::fs::copy(&src_file, &tgt_file)?;
                        // Apply permissions if specified
                        if !dotfile.permissions.is_empty() {
                            apply_permissions(&tgt_file, &dotfile.permissions)?;
                        }
                    }
                    if status == SyncStatus::NotDeployed || status == SyncStatus::TargetMissing {
                        any_created = true;
                    } else {
                        any_updated = true;
                    }
                    self.state.record_deployment(&src_file, &tgt_file, deploy_mode);
                }
                SyncStatus::TargetChanged => {
                    // Target changed but source didn't - keep target, just update state
                    // This is like keeping local changes in git
                    self.state.record_deployment(&src_file, &tgt_file, deploy_mode);
                }
                SyncStatus::Conflict => {
                    // Real conflict - user already chose "use source" at directory level
                    if !self.config.dry_run {
                        if let Some(parent) = tgt_file.parent() {
                            std::fs::create_dir_all(parent)?;
                        }
                        std::fs::copy(&src_file, &tgt_file)?;
                        if !dotfile.permissions.is_empty() {
                            apply_permissions(&tgt_file, &dotfile.permissions)?;
                        }
                    }
                    any_updated = true;
                    self.state.record_deployment(&src_file, &tgt_file, deploy_mode);
                }
                SyncStatus::SourceMissing => {
                    // File was deleted from source, remove from target
                    if !self.config.dry_run && tgt_file.exists() {
                        std::fs::remove_file(&tgt_file)?;
                    }
                    self.state.remove_deployment(&tgt_file);
                    any_updated = true;
                }
                SyncStatus::Synced => {
                    // Nothing to do
                }
            }
        }
        // Set owner if specified (for entire directory)
        if let Some(ref owner) = dotfile.owner {
            if !self.config.dry_run {
                set_owner(target, owner)?;
            }
        }
        // Also record the directory-level deployment for sync status checks
        self.state.record_deployment(source, target, deploy_mode);
        // Summarize: pure creations => Created; any mix involving an update
        // (or a deletion) => Updated; otherwise nothing effectively changed.
        let action = if any_created && !any_updated {
            DeployAction::Created
        } else if any_updated || any_created {
            DeployAction::Updated
        } else {
            DeployAction::Unchanged
        };
        Ok(DeployedFile {
            source: source.to_path_buf(),
            target: target.to_path_buf(),
            action,
        })
    }
    /// Resolves the effective deploy mode for one dotfile: the configured
    /// base mode, overridden by the first matching `link_patterns` /
    /// `copy_patterns` glob against the source path relative to source_dir.
    fn resolve_deploy_mode(&self, dotfile: &DotfileConfig, source: &Path) -> DeployMode {
        let relative_path = source
            .strip_prefix(&self.config.source_dir)
            .unwrap_or(source)
            .to_string_lossy()
            .to_string();
        let base_mode = match dotfile.deploy {
            doot_lang::evaluator::DeployMode::Copy => DeployMode::Copy,
            doot_lang::evaluator::DeployMode::Link => DeployMode::Link,
        };
        match base_mode {
            DeployMode::Copy => {
                for pattern in &dotfile.link_patterns {
                    if let Ok(p) = Pattern::new(pattern) {
                        if p.matches(&relative_path) {
                            return DeployMode::Link;
                        }
                    }
                }
                DeployMode::Copy
            }
            DeployMode::Link => {
                for pattern in &dotfile.copy_patterns {
                    if let Ok(p) = Pattern::new(pattern) {
                        if p.matches(&relative_path) {
                            return DeployMode::Copy;
                        }
                    }
                }
                DeployMode::Link
            }
        }
    }
    /// Copies one file to `target`. Returns `Unchanged` when the bytes
    /// already match, `Updated` when an existing target was overwritten,
    /// and `Created` when the target did not exist before the copy.
    fn copy_single_file(&self, source: &Path, target: &Path) -> Result<DeployAction, DeployError> {
        // Fix: capture existence BEFORE copying. Checking `target.exists()`
        // after the copy always reported `Updated`, even for brand-new files.
        let existed = target.exists();
        if existed {
            let source_content = std::fs::read(source)?;
            let target_content = std::fs::read(target)?;
            if source_content == target_content {
                return Ok(DeployAction::Unchanged);
            }
        }
        if !self.config.dry_run {
            if let Some(parent) = target.parent() {
                std::fs::create_dir_all(parent)?;
            }
            std::fs::copy(source, target)?;
        }
        Ok(if existed {
            DeployAction::Updated
        } else {
            DeployAction::Created
        })
    }
    /// Renders `source` through the template engine and writes the result
    /// to `target`. Returns `Unchanged` when the rendered output already
    /// matches, `Updated` when the target was rewritten, `Created` when
    /// it did not exist before.
    fn deploy_template(
        &self,
        source: &PathBuf,
        target: &PathBuf,
    ) -> Result<DeployAction, DeployError> {
        let content = std::fs::read_to_string(source)?;
        let rendered = self
            .template_engine
            .render(&content)
            .map_err(DeployError::TemplateError)?;
        // Fix: capture existence BEFORE writing, for the same reason as
        // `copy_single_file` — the post-write check misreported `Updated`.
        let existed = target.exists();
        if existed {
            let existing = std::fs::read_to_string(target)?;
            if existing == rendered {
                return Ok(DeployAction::Unchanged);
            }
        }
        if !self.config.dry_run {
            if let Some(parent) = target.parent() {
                std::fs::create_dir_all(parent)?;
            }
            std::fs::write(target, rendered)?;
        }
        Ok(if existed {
            DeployAction::Updated
        } else {
            DeployAction::Created
        })
    }
    /// Copies a pre-existing target into the backup dir as
    /// `<file_name>.backup` before it gets replaced.
    ///
    /// NOTE(review): the backup name is derived from the file name only,
    /// so two targets sharing a basename overwrite each other's backup —
    /// consider including a path hash or timestamp.
    fn backup_existing(&self, target: &PathBuf) -> Result<(), DeployError> {
        let backup_path = self.config.backup_dir.join(
            target
                .file_name()
                .unwrap_or_default()
                .to_string_lossy()
                .to_string()
                + ".backup",
        );
        std::fs::create_dir_all(&self.config.backup_dir)?;
        if target.is_dir() {
            copy_dir_recursive(target, &backup_path).map_err(|e| {
                DeployError::BackupFailed(format!(
                    "failed to backup {} to {}: {}",
                    target.display(),
                    backup_path.display(),
                    e
                ))
            })?;
        } else {
            std::fs::copy(target, &backup_path).map_err(|e| {
                DeployError::BackupFailed(format!(
                    "failed to backup {} to {}: {}",
                    target.display(),
                    backup_path.display(),
                    e
                ))
            })?;
        }
        Ok(())
    }
}
/// Recursively copies the directory tree at `src` into `dst`, creating
/// `dst` (and any subdirectories) as needed. Non-directory entries are
/// copied with `std::fs::copy`.
fn copy_dir_recursive(src: &Path, dst: &Path) -> std::io::Result<()> {
    std::fs::create_dir_all(dst)?;
    for entry in std::fs::read_dir(src)? {
        let entry = entry?;
        let from = entry.path();
        let to = dst.join(entry.file_name());
        if entry.file_type()?.is_dir() {
            copy_dir_recursive(&from, &to)?;
        } else {
            std::fs::copy(&from, &to)?;
        }
    }
    Ok(())
}
use doot_lang::evaluator::PermissionRule;
// Applies permission rules to a deployed target.
//
// For a single file, rules are evaluated IN ORDER and the first match wins:
// a `Single` rule always matches; a `Pattern` rule matches against the
// file name only. A file matching no rule keeps its current permissions.
// For a directory, every contained file is matched recursively (patterns
// there match the path relative to the directory root).
fn apply_permissions(target: &Path, rules: &[PermissionRule]) -> Result<(), DeployError> {
    if target.is_file() {
        // For single files, apply first matching rule
        for rule in rules {
            match rule {
                PermissionRule::Single(mode) => {
                    set_file_permissions(target, *mode)?;
                    return Ok(());
                }
                PermissionRule::Pattern { pattern, mode } => {
                    if let Ok(p) = Pattern::new(pattern) {
                        let name = target.file_name().unwrap_or_default().to_string_lossy();
                        if p.matches(&name) {
                            set_file_permissions(target, *mode)?;
                            return Ok(());
                        }
                    }
                }
            }
        }
    } else if target.is_dir() {
        // For directories, walk all files and apply matching rules
        apply_permissions_recursive(target, target, rules)?;
    }
    Ok(())
}
// Walks `current` (rooted at `base`) and applies the first matching rule
// to each regular file. Directories themselves get no permission change;
// `Pattern` rules match the path RELATIVE to `base`, and an invalid glob
// is silently skipped.
fn apply_permissions_recursive(
    base: &Path,
    current: &Path,
    rules: &[PermissionRule],
) -> Result<(), DeployError> {
    for entry in std::fs::read_dir(current)? {
        let entry = entry?;
        let path = entry.path();
        if path.is_dir() {
            apply_permissions_recursive(base, &path, rules)?;
        } else {
            let relative = path
                .strip_prefix(base)
                .unwrap_or(&path)
                .to_string_lossy()
                .to_string();
            for rule in rules {
                match rule {
                    PermissionRule::Single(mode) => {
                        set_file_permissions(&path, *mode)?;
                        // First match wins for this file.
                        break;
                    }
                    PermissionRule::Pattern { pattern, mode } => {
                        if let Ok(p) = Pattern::new(pattern) {
                            if p.matches(&relative) {
                                set_file_permissions(&path, *mode)?;
                                break;
                            }
                        }
                    }
                }
            }
        }
    }
    Ok(())
}
// Sets Unix permission bits on `path`; `mode` is the raw mode value passed
// through to `Permissions::from_mode` (e.g. 0o600).
#[cfg(unix)]
fn set_file_permissions(path: &Path, mode: u32) -> Result<(), DeployError> {
    use std::os::unix::fs::PermissionsExt;
    let permissions = std::fs::Permissions::from_mode(mode);
    std::fs::set_permissions(path, permissions)?;
    Ok(())
}
// Non-Unix stub: permission modes have no direct equivalent, so this is a
// deliberate no-op.
#[cfg(not(unix))]
fn set_file_permissions(_path: &Path, _mode: u32) -> Result<(), DeployError> {
    Ok(())
}
#[cfg(unix)]
fn set_owner(path: &Path, owner: &str) -> Result<(), DeployError> {
use std::process::Command;
// Try without sudo first
let output = Command::new("chown")
.arg("-R")
.arg(owner)
.arg(path)
.output()
.map_err(|e| DeployError::ChownFailed(path.to_path_buf(), e.to_string()))?;
if output.status.success() {
return Ok(());
}
// If failed, try with sudo
let sudo_output = Command::new("sudo")
.arg("chown")
.arg("-R")
.arg(owner)
.arg(path)
.output()
.map_err(|e| DeployError::ChownFailed(path.to_path_buf(), e.to_string()))?;
if !sudo_output.status.success() {
let stderr = String::from_utf8_lossy(&sudo_output.stderr);
return Err(DeployError::ChownFailed(path.to_path_buf(), stderr.to_string()));
}
Ok(())
}
// Non-Unix stub: there is no chown; ownership changes are a deliberate no-op.
#[cfg(not(unix))]
fn set_owner(_path: &Path, _owner: &str) -> Result<(), DeployError> {
    Ok(())
}

View file

@ -0,0 +1,73 @@
//! Template rendering for dotfiles.
use std::collections::HashMap;
/// Renders templates with variable substitution.
pub struct TemplateEngine {
    // Variable name -> substitution value; prepopulated by `new()` with
    // platform paths, os/arch/hostname, and `env.<NAME>` for each env var.
    variables: HashMap<String, String>,
}
impl TemplateEngine {
    /// Creates a new engine with default variables: `home`, `config_dir`,
    /// `data_dir`, `cache_dir` (when resolvable), `os`, `arch`, `hostname`,
    /// and `env.<NAME>` for every process environment variable.
    pub fn new() -> Self {
        let mut variables = HashMap::new();
        if let Some(home) = dirs::home_dir() {
            variables.insert("home".to_string(), home.display().to_string());
        }
        if let Some(config) = dirs::config_dir() {
            variables.insert("config_dir".to_string(), config.display().to_string());
        }
        if let Some(data) = dirs::data_dir() {
            variables.insert("data_dir".to_string(), data.display().to_string());
        }
        if let Some(cache) = dirs::cache_dir() {
            variables.insert("cache_dir".to_string(), cache.display().to_string());
        }
        variables.insert("os".to_string(), std::env::consts::OS.to_string());
        variables.insert("arch".to_string(), std::env::consts::ARCH.to_string());
        if let Ok(hostname) = hostname::get() {
            variables.insert("hostname".to_string(), hostname.to_string_lossy().to_string());
        }
        for (key, value) in std::env::vars() {
            variables.insert(format!("env.{}", key), value);
        }
        Self { variables }
    }
    /// Sets a template variable, overriding any default of the same name.
    pub fn set_variable(&mut self, key: String, value: String) {
        self.variables.insert(key, value);
    }
    /// Renders a template string, substituting `{{ key }}` placeholders.
    ///
    /// Rewritten as a single left-to-right scan. This fixes three issues
    /// with the old replace-per-variable loop: (1) only `{{key}}` and
    /// `{{ key }}` (exactly one space) were recognized — now any internal
    /// whitespace works; (2) every variable (including hundreds of env
    /// vars) rescanned the whole string; (3) substituted values containing
    /// `{{...}}` could be re-substituted depending on HashMap iteration
    /// order, making output nondeterministic — values are now emitted
    /// verbatim.
    ///
    /// # Errors
    /// Returns the list of placeholders whose key is not a known variable.
    pub fn render(&self, template: &str) -> Result<String, String> {
        let mut output = String::with_capacity(template.len());
        let mut unresolved: Vec<String> = Vec::new();
        let mut rest = template;
        while let Some(start) = rest.find("{{") {
            output.push_str(&rest[..start]);
            rest = &rest[start..];
            // `{{{` — emit the leading literal brace and retry one byte
            // later, so placeholders wrapped in braces still resolve.
            if rest.as_bytes().get(2) == Some(&b'{') {
                output.push('{');
                rest = &rest[1..];
                continue;
            }
            match rest[2..].find("}}") {
                Some(end) => {
                    let raw = &rest[2..2 + end];
                    match self.variables.get(raw.trim()) {
                        Some(value) => output.push_str(value),
                        None => unresolved.push(format!("{{{{{}}}}}", raw)),
                    }
                    rest = &rest[2 + end + 2..];
                }
                // Unterminated `{{`: keep the tail verbatim.
                None => break,
            }
        }
        output.push_str(rest);
        if unresolved.is_empty() {
            Ok(output)
        } else {
            Err(format!(
                "unresolved template variables: {}",
                unresolved.join(", ")
            ))
        }
    }
}
// Default engine: identical to `TemplateEngine::new()`, i.e. preloaded
// with the platform/env variables.
impl Default for TemplateEngine {
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -0,0 +1,151 @@
//! Age-based encryption for secret files.
use age::secrecy::ExposeSecret;
use std::io::{Read, Write};
use std::path::PathBuf;
use thiserror::Error;
/// Encryption errors.
#[derive(Error, Debug)]
pub enum EncryptionError {
    /// The recipient string failed to parse as an age x25519 public key.
    #[error("invalid recipient: {0}")]
    InvalidRecipient(String),
    /// The identity string failed to parse as an age x25519 identity.
    #[error("invalid identity: {0}")]
    InvalidIdentity(String),
    /// Encrypting (or preparing to encrypt) the payload failed.
    #[error("encryption failed: {0}")]
    EncryptionFailed(String),
    /// Decrypting the payload failed (bad data, wrong key, no identity).
    #[error("decryption failed: {0}")]
    DecryptionFailed(String),
    /// Underlying filesystem error from the file helpers.
    #[error("io error: {0}")]
    IoError(#[from] std::io::Error),
}
/// Age encryption handler.
pub struct AgeEncryption {
    // Identity used for decryption; `None` until `with_identity` is called.
    identity: Option<age::x25519::Identity>,
    // Public keys that encrypted payloads are addressed to.
    recipients: Vec<age::x25519::Recipient>,
}
impl AgeEncryption {
    /// Creates a new encryption handler with no identity and no recipients.
    pub fn new() -> Self {
        Self {
            identity: None,
            recipients: Vec::new(),
        }
    }
    /// Sets the identity used for decryption.
    ///
    /// # Errors
    /// Returns `InvalidIdentity` when the string is not a valid age
    /// x25519 identity.
    pub fn with_identity(mut self, identity_str: &str) -> Result<Self, EncryptionError> {
        let identity = identity_str
            .parse::<age::x25519::Identity>()
            .map_err(|e| EncryptionError::InvalidIdentity(e.to_string()))?;
        self.identity = Some(identity);
        Ok(self)
    }
    /// Adds a recipient public key for subsequent encryptions.
    ///
    /// # Errors
    /// Returns `InvalidRecipient` when the string is not a valid age
    /// x25519 recipient.
    pub fn add_recipient(&mut self, recipient_str: &str) -> Result<(), EncryptionError> {
        let recipient = recipient_str
            .parse::<age::x25519::Recipient>()
            .map_err(|e| EncryptionError::InvalidRecipient(e.to_string()))?;
        self.recipients.push(recipient);
        Ok(())
    }
    /// Encrypts data for all configured recipients.
    ///
    /// # Errors
    /// Returns `EncryptionFailed` when no recipients are configured or
    /// when the age stream fails.
    pub fn encrypt(&self, data: &[u8]) -> Result<Vec<u8>, EncryptionError> {
        if self.recipients.is_empty() {
            return Err(EncryptionError::EncryptionFailed(
                "no recipients configured".to_string(),
            ));
        }
        let recipients: Vec<Box<dyn age::Recipient + Send>> = self
            .recipients
            .iter()
            .map(|r| Box::new(r.clone()) as Box<dyn age::Recipient + Send>)
            .collect();
        // Fix: `with_recipients` returns None for an empty list; surface
        // that as an error instead of panicking via `expect` in library code.
        let encryptor = age::Encryptor::with_recipients(recipients).ok_or_else(|| {
            EncryptionError::EncryptionFailed("no recipients configured".to_string())
        })?;
        let mut encrypted = Vec::new();
        let mut writer = encryptor
            .wrap_output(&mut encrypted)
            .map_err(|e| EncryptionError::EncryptionFailed(e.to_string()))?;
        writer
            .write_all(data)
            .map_err(|e| EncryptionError::EncryptionFailed(e.to_string()))?;
        writer
            .finish()
            .map_err(|e| EncryptionError::EncryptionFailed(e.to_string()))?;
        Ok(encrypted)
    }
    /// Decrypts data using the configured identity.
    ///
    /// # Errors
    /// Returns `DecryptionFailed` when no identity is configured, the
    /// payload is passphrase-encrypted, or the age stream fails.
    pub fn decrypt(&self, data: &[u8]) -> Result<Vec<u8>, EncryptionError> {
        let identity = self
            .identity
            .as_ref()
            .ok_or_else(|| EncryptionError::DecryptionFailed("no identity configured".to_string()))?;
        let decryptor = match age::Decryptor::new(data)
            .map_err(|e| EncryptionError::DecryptionFailed(e.to_string()))?
        {
            age::Decryptor::Recipients(d) => d,
            // Passphrase-encrypted payloads are not supported here.
            _ => {
                return Err(EncryptionError::DecryptionFailed(
                    "unexpected decryptor type".to_string(),
                ))
            }
        };
        let mut decrypted = Vec::new();
        let mut reader = decryptor
            .decrypt(std::iter::once(identity as &dyn age::Identity))
            .map_err(|e| EncryptionError::DecryptionFailed(e.to_string()))?;
        reader
            .read_to_end(&mut decrypted)
            .map_err(|e| EncryptionError::DecryptionFailed(e.to_string()))?;
        Ok(decrypted)
    }
    /// Encrypts a file to a target path.
    ///
    /// # Errors
    /// Returns I/O errors from reading/writing and any encryption error.
    pub fn encrypt_file(
        &self,
        source: &std::path::Path,
        target: &std::path::Path,
    ) -> Result<(), EncryptionError> {
        let data = std::fs::read(source)?;
        let encrypted = self.encrypt(&data)?;
        std::fs::write(target, encrypted)?;
        Ok(())
    }
    /// Decrypts a file to a target path.
    ///
    /// # Errors
    /// Returns I/O errors from reading/writing and any decryption error.
    pub fn decrypt_file(
        &self,
        source: &std::path::Path,
        target: &std::path::Path,
    ) -> Result<(), EncryptionError> {
        let data = std::fs::read(source)?;
        let decrypted = self.decrypt(&data)?;
        std::fs::write(target, decrypted)?;
        Ok(())
    }
    /// Generates a new x25519 keypair, returning
    /// `(identity string, recipient string)`.
    pub fn generate_keypair() -> (String, String) {
        let identity = age::x25519::Identity::generate();
        let recipient = identity.to_public();
        let identity_str = identity.to_string();
        (identity_str.expose_secret().clone(), recipient.to_string())
    }
}
impl Default for AgeEncryption {
    /// Equivalent to [`AgeEncryption::new`]: no identity, no recipients.
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -0,0 +1,89 @@
//! Lifecycle hook execution.
use doot_lang::HookStage;
use std::process::Command;
use thiserror::Error;
/// Hook execution errors.
#[derive(Error, Debug)]
pub enum HookError {
    /// The hook ran but exited non-zero; `message` carries its stderr.
    #[error("hook failed: {command}: {message}")]
    ExecutionFailed { command: String, message: String },
    /// Spawning or waiting on the hook process failed.
    #[error("io error: {0}")]
    IoError(#[from] std::io::Error),
}
/// A lifecycle hook.
#[derive(Debug, Clone)]
pub struct Hook {
    /// Stage at which this hook fires.
    pub stage: HookStage,
    /// Shell command line, executed via `sh -c`.
    pub command: String,
    /// Working directory for the command; inherits the process cwd when `None`.
    pub working_dir: Option<std::path::PathBuf>,
}
/// Executes lifecycle hooks.
pub struct HookRunner {
    /// Registered hooks, executed in insertion order.
    hooks: Vec<Hook>,
    /// When true, commands are only printed, never executed.
    dry_run: bool,
}
impl HookRunner {
    /// Creates an empty runner with dry-run disabled.
    pub fn new() -> Self {
        Self {
            hooks: Vec::new(),
            dry_run: false,
        }
    }

    /// Enables or disables dry-run mode (builder style).
    pub fn dry_run(mut self, dry_run: bool) -> Self {
        self.dry_run = dry_run;
        self
    }

    /// Registers a hook for later execution.
    pub fn add_hook(&mut self, hook: Hook) {
        self.hooks.push(hook);
    }

    /// Runs every registered hook whose stage matches `stage`, in
    /// registration order, stopping at the first failure.
    pub fn run_stage(&self, stage: HookStage) -> Result<(), HookError> {
        self.hooks
            .iter()
            .filter(|hook| hook.stage == stage)
            .try_for_each(|hook| self.run_hook(hook))
    }

    /// Executes one hook via `sh -c`, honoring dry-run mode.
    // NOTE(review): relies on a POSIX `sh` being in PATH — confirm Windows
    // hosts are out of scope for hooks.
    fn run_hook(&self, hook: &Hook) -> Result<(), HookError> {
        if self.dry_run {
            println!("[dry-run] would run: {}", hook.command);
            return Ok(());
        }
        let mut shell = Command::new("sh");
        shell.arg("-c").arg(&hook.command);
        if let Some(dir) = hook.working_dir.as_ref() {
            shell.current_dir(dir);
        }
        let output = shell.output()?;
        if output.status.success() {
            Ok(())
        } else {
            Err(HookError::ExecutionFailed {
                command: hook.command.clone(),
                message: String::from_utf8_lossy(&output.stderr).to_string(),
            })
        }
    }
}
impl Default for HookRunner {
    /// Equivalent to [`HookRunner::new`]: no hooks, dry-run disabled.
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -0,0 +1,20 @@
//! Core functionality for the doot dotfiles manager.
//!
//! Provides configuration, deployment, encryption, package management,
//! and state tracking.
pub mod config;
pub mod deploy;
pub mod encryption;
pub mod hooks;
pub mod os;
pub mod package;
pub mod state;
pub use config::Config;
pub use deploy::{Deployer, DeployResult};
pub use encryption::AgeEncryption;
pub use hooks::HookRunner;
pub use os::OsInfo;
pub use package::PackageManager;
pub use state::StateStore;

151
crates/doot-core/src/os.rs Normal file
View file

@ -0,0 +1,151 @@
//! Operating system detection.
use os_info::Type;
use std::collections::HashMap;
use std::sync::OnceLock;
/// System information.
#[derive(Debug, Clone)]
pub struct OsInfo {
    /// Coarse OS family.
    pub os_type: OsType,
    /// Lowercased distro name; `None` for generic/unidentified Linux.
    pub distro: Option<String>,
    /// OS version string as reported by `os_info`.
    pub version: Option<String>,
    /// CPU architecture (from `std::env::consts::ARCH`).
    pub arch: String,
    /// Machine hostname; empty when it cannot be determined.
    pub hostname: String,
}
/// Operating system type.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OsType {
    Linux,
    MacOS,
    Windows,
    /// Anything not recognized as one of the families above.
    Unknown,
}
impl OsInfo {
    /// Detects the current operating system family, distro, version,
    /// architecture, and hostname.
    pub fn detect() -> Self {
        let info = os_info::get();
        let os_type = match info.os_type() {
            Type::Macos => OsType::MacOS,
            Type::Windows => OsType::Windows,
            Type::Linux
            | Type::Ubuntu
            | Type::Debian
            | Type::Arch
            | Type::Fedora
            | Type::CentOS
            | Type::Redhat
            | Type::Alpine
            | Type::NixOS
            | Type::Gentoo
            | Type::openSUSE => OsType::Linux,
            _ => OsType::Unknown,
        };
        // Generic `Linux` carries no useful distro information; anything more
        // specific is recorded lowercased.
        // NOTE(review): non-Linux systems also get a `distro` value (e.g. the
        // macOS type name) — confirm callers expect that.
        let distro = match info.os_type() {
            Type::Linux => None,
            _ => Some(info.os_type().to_string().to_lowercase()),
        };
        let hostname = hostname::get()
            .map(|h| h.to_string_lossy().to_string())
            .unwrap_or_default();
        Self {
            os_type,
            distro,
            // Explicit `Some` instead of the obscure `String -> Option<String>`
            // blanket `.into()` conversion; the value is always present.
            // NOTE(review): os_info renders unknown versions as a placeholder
            // string — consider mapping that case to `None`.
            version: Some(info.version().to_string()),
            arch: std::env::consts::ARCH.to_string(),
            hostname,
        }
    }

    /// Returns true if the detected family is Linux.
    pub fn is_linux(&self) -> bool {
        matches!(self.os_type, OsType::Linux)
    }

    /// Returns true if the detected family is macOS.
    pub fn is_macos(&self) -> bool {
        matches!(self.os_type, OsType::MacOS)
    }

    /// Returns true if the detected family is Windows.
    pub fn is_windows(&self) -> bool {
        matches!(self.os_type, OsType::Windows)
    }

    /// Detects the available package manager for this OS, or `None` when no
    /// supported manager is found.
    pub fn detect_package_manager(&self) -> Option<&'static str> {
        match self.os_type {
            OsType::MacOS => Some("brew"),
            OsType::Linux => {
                // Check AUR helpers first (they wrap pacman)
                // Prefer PATH lookup, fallback to hardcoded paths
                if command_exists("yay") {
                    Some("yay")
                } else if command_exists("paru") {
                    Some("paru")
                } else if command_exists("pacman") {
                    Some("pacman")
                } else if command_exists("apt") {
                    Some("apt")
                } else if command_exists("dnf") {
                    Some("dnf")
                } else if command_exists("nix") {
                    Some("nix")
                } else {
                    None
                }
            }
            _ => None,
        }
    }
}
impl Default for OsInfo {
    /// Runs full OS detection; prefer [`get_os_info`] to reuse the cached result.
    fn default() -> Self {
        Self::detect()
    }
}
/// Cache for command_exists checks.
static COMMAND_CACHE: OnceLock<std::sync::Mutex<HashMap<String, bool>>> = OnceLock::new();
/// Checks if a command exists in PATH or common bin directories (cached).
///
/// Searches the `PATH` environment variable directly instead of spawning a
/// `which` subprocess: this avoids one process launch per uncached lookup and
/// works even on systems where `which` itself is not installed.
///
/// NOTE(review): only checks that a regular file with that name exists (not
/// the executable bit), and does not handle Windows `PATHEXT`; callers use
/// this for Linux package-manager detection only.
fn command_exists(cmd: &str) -> bool {
    let cache = COMMAND_CACHE.get_or_init(|| std::sync::Mutex::new(HashMap::new()));
    let mut cache = cache.lock().unwrap();
    if let Some(&exists) = cache.get(cmd) {
        return exists;
    }
    // Check each PATH entry for a regular file named `cmd`.
    let in_path = std::env::var_os("PATH")
        .map(|paths| std::env::split_paths(&paths).any(|dir| dir.join(cmd).is_file()))
        .unwrap_or(false);
    let exists = in_path || {
        // Fallback to hardcoded paths for minimal environments where PATH
        // may be unset or stripped.
        let paths = ["/usr/bin/", "/usr/local/bin/", "/bin/"];
        paths.iter().any(|p| std::path::Path::new(&format!("{}{}", p, cmd)).exists())
    };
    cache.insert(cmd.to_string(), exists);
    exists
}
/// Cached OsInfo instance.
static OS_INFO: OnceLock<OsInfo> = OnceLock::new();
/// Returns cached OS info.
///
/// Detection runs at most once (on first call); every later call returns a
/// reference to the same instance.
pub fn get_os_info() -> &'static OsInfo {
    OS_INFO.get_or_init(OsInfo::detect)
}

View file

@ -0,0 +1,159 @@
use super::{PackageError, PackageManager};
use std::io::Write;
use std::process::{Command, Stdio};
/// The Debian/Ubuntu `apt` package manager.
pub struct Apt {
    /// When true, commands are printed instead of executed.
    dry_run: bool,
    /// When true, apt is invoked through `sudo`.
    use_sudo: bool,
}
impl Apt {
    /// Creates an apt wrapper with sudo enabled and dry-run disabled.
    pub fn new() -> Self {
        Self {
            dry_run: false,
            use_sudo: true,
        }
    }

    /// Enables or disables dry-run mode (builder style).
    pub fn dry_run(mut self, dry_run: bool) -> Self {
        self.dry_run = dry_run;
        self
    }

    /// Enables or disables sudo (builder style).
    pub fn use_sudo(mut self, use_sudo: bool) -> Self {
        self.use_sudo = use_sudo;
        self
    }

    /// Runs `apt` (optionally through sudo) with `args`, honoring dry-run.
    fn run_apt(&self, args: &[&str]) -> Result<(), PackageError> {
        if self.dry_run {
            let prefix = if self.use_sudo { "sudo " } else { "" };
            println!("[dry-run] {}apt {}", prefix, args.join(" "));
            return Ok(());
        }
        let mut cmd = if self.use_sudo {
            let mut sudo = Command::new("sudo");
            sudo.arg("apt");
            sudo
        } else {
            Command::new("apt")
        };
        let output = cmd.args(args).output()?;
        if output.status.success() {
            Ok(())
        } else {
            Err(PackageError::InstallFailed {
                package: args.join(" "),
                message: String::from_utf8_lossy(&output.stderr).to_string(),
            })
        }
    }

    /// Runs `sudo -S apt …`, feeding `password` to sudo over stdin.
    fn run_apt_with_password(&self, args: &[&str], password: &str) -> Result<(), PackageError> {
        if self.dry_run {
            println!("[dry-run] sudo apt {}", args.join(" "));
            return Ok(());
        }
        let mut child = Command::new("sudo")
            .arg("-S")
            .arg("apt")
            .args(args)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()?;
        // Best effort: sudo may not read stdin when credentials are cached,
        // so a failed write is intentionally ignored.
        if let Some(mut stdin) = child.stdin.take() {
            writeln!(stdin, "{}", password).ok();
        }
        let output = child.wait_with_output()?;
        if output.status.success() {
            Ok(())
        } else {
            Err(PackageError::InstallFailed {
                package: args.join(" "),
                message: String::from_utf8_lossy(&output.stderr).to_string(),
            })
        }
    }
}
impl PackageManager for Apt {
    fn name(&self) -> &'static str {
        "apt"
    }

    fn is_available(&self) -> bool {
        std::path::Path::new("/usr/bin/apt").exists()
    }

    fn needs_sudo(&self) -> bool {
        self.use_sudo
    }

    /// Installs via `apt install -y <packages…>`; no-op for an empty list.
    fn install(&self, packages: &[String]) -> Result<(), PackageError> {
        if packages.is_empty() {
            return Ok(());
        }
        let mut args = vec!["install", "-y"];
        args.extend(packages.iter().map(String::as_str));
        self.run_apt(&args)
    }

    /// Installs with an explicit sudo password; no-op for an empty list.
    fn install_with_sudo(&self, packages: &[String], password: &str) -> Result<(), PackageError> {
        if packages.is_empty() {
            return Ok(());
        }
        let mut args = vec!["install", "-y"];
        args.extend(packages.iter().map(String::as_str));
        self.run_apt_with_password(&args, password)
    }

    /// Removes via `apt remove -y <packages…>`; no-op for an empty list.
    fn uninstall(&self, packages: &[String]) -> Result<(), PackageError> {
        if packages.is_empty() {
            return Ok(());
        }
        let mut args = vec!["remove", "-y"];
        args.extend(packages.iter().map(String::as_str));
        self.run_apt(&args)
    }

    /// Queries dpkg's database; success status means the package is installed.
    fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
        let status = Command::new("dpkg").args(["-s", package]).output()?.status;
        Ok(status.success())
    }

    fn update(&self) -> Result<(), PackageError> {
        self.run_apt(&["update"])
    }

    fn upgrade(&self) -> Result<(), PackageError> {
        self.run_apt(&["upgrade", "-y"])
    }
}
impl Default for Apt {
    /// Equivalent to [`Apt::new`]: sudo enabled, dry-run disabled.
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -0,0 +1,105 @@
use super::{PackageError, PackageManager};
use std::process::Command;
/// The Homebrew (`brew`) package manager for macOS.
pub struct Brew {
    /// When true, commands are printed instead of executed.
    dry_run: bool,
}
impl Brew {
    /// Creates a brew wrapper with dry-run disabled.
    pub fn new() -> Self {
        Self { dry_run: false }
    }

    /// Enables or disables dry-run mode (builder style).
    pub fn dry_run(mut self, dry_run: bool) -> Self {
        self.dry_run = dry_run;
        self
    }

    /// Runs `brew` with `args`, honoring dry-run mode.
    fn run_brew(&self, args: &[&str]) -> Result<(), PackageError> {
        if self.dry_run {
            println!("[dry-run] brew {}", args.join(" "));
            return Ok(());
        }
        let output = Command::new("brew").args(args).output()?;
        if output.status.success() {
            Ok(())
        } else {
            Err(PackageError::InstallFailed {
                package: args.join(" "),
                message: String::from_utf8_lossy(&output.stderr).to_string(),
            })
        }
    }
}
impl PackageManager for Brew {
    fn name(&self) -> &'static str {
        "brew"
    }

    /// Probes availability by invoking `brew --version`.
    fn is_available(&self) -> bool {
        Command::new("brew")
            .arg("--version")
            .output()
            .map(|o| o.status.success())
            .unwrap_or(false)
    }

    /// Homebrew never requires sudo.
    fn needs_sudo(&self) -> bool {
        false
    }

    /// Installs via `brew install <packages…>`; no-op for an empty list.
    fn install(&self, packages: &[String]) -> Result<(), PackageError> {
        if packages.is_empty() {
            return Ok(());
        }
        let mut args = vec!["install"];
        args.extend(packages.iter().map(String::as_str));
        self.run_brew(&args)
    }

    /// Brew does not use sudo, so the password is ignored.
    fn install_with_sudo(&self, packages: &[String], _password: &str) -> Result<(), PackageError> {
        self.install(packages)
    }

    /// Removes via `brew uninstall <packages…>`; no-op for an empty list.
    fn uninstall(&self, packages: &[String]) -> Result<(), PackageError> {
        if packages.is_empty() {
            return Ok(());
        }
        let mut args = vec!["uninstall"];
        args.extend(packages.iter().map(String::as_str));
        self.run_brew(&args)
    }

    /// `brew list <pkg>` succeeds exactly when the package is installed.
    fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
        let status = Command::new("brew").args(["list", package]).output()?.status;
        Ok(status.success())
    }

    fn update(&self) -> Result<(), PackageError> {
        self.run_brew(&["update"])
    }

    fn upgrade(&self) -> Result<(), PackageError> {
        self.run_brew(&["upgrade"])
    }
}
impl Default for Brew {
    /// Equivalent to [`Brew::new`]: dry-run disabled.
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -0,0 +1,164 @@
//! Package manager abstraction.
pub mod apt;
pub mod brew;
pub mod pacman;
pub mod yay;
use std::collections::HashSet;
use std::sync::Mutex;
use thiserror::Error;
pub use apt::Apt;
pub use brew::Brew;
pub use pacman::Pacman;
pub use yay::Yay;
/// Package management errors.
#[derive(Error, Debug)]
pub enum PackageError {
    /// The requested package does not exist.
    #[error("package not found: {0}")]
    NotFound(String),
    /// A package-manager command exited non-zero; `message` carries stderr.
    #[error("installation failed: {package}: {message}")]
    InstallFailed { package: String, message: String },
    /// The named manager is not installed or not usable on this system.
    #[error("package manager not available: {0}")]
    ManagerNotAvailable(String),
    /// Failed to spawn or wait on the manager process.
    #[error("io error: {0}")]
    IoError(#[from] std::io::Error),
}
/// Common interface for package managers.
///
/// Implementations wrap a concrete system tool (brew, apt, pacman, yay) or a
/// mock used under `DOOT_TEST_MODE`.
pub trait PackageManager: Send + Sync {
    /// Returns the manager name.
    fn name(&self) -> &'static str;
    /// Checks if this manager is available on the system.
    fn is_available(&self) -> bool;
    /// Returns true if this package manager requires sudo.
    fn needs_sudo(&self) -> bool;
    /// Installs packages.
    fn install(&self, packages: &[String]) -> Result<(), PackageError>;
    /// Installs packages with sudo using the provided password.
    fn install_with_sudo(&self, packages: &[String], password: &str) -> Result<(), PackageError>;
    /// Uninstalls packages.
    fn uninstall(&self, packages: &[String]) -> Result<(), PackageError>;
    /// Checks if a package is installed.
    fn is_installed(&self, package: &str) -> Result<bool, PackageError>;
    /// Updates package lists.
    fn update(&self) -> Result<(), PackageError>;
    /// Upgrades installed packages.
    fn upgrade(&self) -> Result<(), PackageError>;
}
/// Returns true when doot runs in test mode (`DOOT_TEST_MODE=1`).
///
/// Any other value — or an unset/non-unicode variable — counts as false.
pub fn is_test_mode() -> bool {
    matches!(std::env::var("DOOT_TEST_MODE").as_deref(), Ok("1"))
}
/// Mock package manager for testing - doesn't actually install anything
pub struct MockPackageManager {
    /// In-memory set of "installed" package names; Mutex because the
    /// `PackageManager` trait requires `Send + Sync`.
    installed: Mutex<HashSet<String>>,
}
impl MockPackageManager {
    /// Creates a mock manager with nothing installed.
    pub fn new() -> Self {
        Self {
            installed: Mutex::new(HashSet::new()),
        }
    }
}
impl Default for MockPackageManager {
    /// Equivalent to [`MockPackageManager::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl PackageManager for MockPackageManager {
    fn name(&self) -> &'static str {
        "mock"
    }

    /// The mock is always usable.
    fn is_available(&self) -> bool {
        true
    }

    fn needs_sudo(&self) -> bool {
        false
    }

    /// Records every package as installed in the in-memory set.
    fn install(&self, packages: &[String]) -> Result<(), PackageError> {
        self.installed
            .lock()
            .unwrap()
            .extend(packages.iter().cloned());
        Ok(())
    }

    /// Identical to `install`; the password is ignored.
    fn install_with_sudo(&self, packages: &[String], _password: &str) -> Result<(), PackageError> {
        self.install(packages)
    }

    /// Drops every package from the in-memory set.
    fn uninstall(&self, packages: &[String]) -> Result<(), PackageError> {
        let mut installed = self.installed.lock().unwrap();
        for pkg in packages {
            installed.remove(pkg);
        }
        Ok(())
    }

    fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
        Ok(self.installed.lock().unwrap().contains(package))
    }

    /// No-op: there are no real package lists to refresh.
    fn update(&self) -> Result<(), PackageError> {
        Ok(())
    }

    /// No-op: there is nothing real to upgrade.
    fn upgrade(&self) -> Result<(), PackageError> {
        Ok(())
    }
}
/// Detects the first available package manager on this system.
///
/// Under test mode (`DOOT_TEST_MODE=1`) a mock manager is returned so nothing
/// is actually installed. AUR helpers (yay) are probed before pacman because
/// they wrap it.
pub fn detect_package_manager() -> Option<Box<dyn PackageManager>> {
    if is_test_mode() {
        return Some(Box::new(MockPackageManager::new()));
    }
    let candidates: Vec<Box<dyn PackageManager>> = vec![
        Box::new(Brew::new()),
        Box::new(Yay::new()),
        Box::new(Pacman::new()),
        Box::new(Apt::new()),
    ];
    candidates.into_iter().find(|manager| manager.is_available())
}
/// Gets a package manager by name ("brew", "apt", "pacman", or "yay").
///
/// Under test mode (`DOOT_TEST_MODE=1`) the mock manager is returned
/// regardless of `name`. Unknown names yield `None`.
pub fn get_package_manager(name: &str) -> Option<Box<dyn PackageManager>> {
    if is_test_mode() {
        return Some(Box::new(MockPackageManager::new()));
    }
    let manager: Box<dyn PackageManager> = match name {
        "brew" => Box::new(Brew::new()),
        "apt" => Box::new(Apt::new()),
        "pacman" => Box::new(Pacman::new()),
        "yay" => Box::new(Yay::new()),
        _ => return None,
    };
    Some(manager)
}

View file

@ -0,0 +1,159 @@
use super::{PackageError, PackageManager};
use std::io::Write;
use std::process::{Command, Stdio};
/// The Arch Linux `pacman` package manager.
pub struct Pacman {
    /// When true, commands are printed instead of executed.
    dry_run: bool,
    /// When true, pacman is invoked through `sudo`.
    use_sudo: bool,
}
impl Pacman {
    /// Creates a pacman wrapper with sudo enabled and dry-run disabled.
    pub fn new() -> Self {
        Self {
            dry_run: false,
            use_sudo: true,
        }
    }

    /// Enables or disables dry-run mode (builder style).
    pub fn dry_run(mut self, dry_run: bool) -> Self {
        self.dry_run = dry_run;
        self
    }

    /// Enables or disables sudo (builder style).
    pub fn use_sudo(mut self, use_sudo: bool) -> Self {
        self.use_sudo = use_sudo;
        self
    }

    /// Runs `pacman` (optionally through sudo) with `args`, honoring dry-run.
    fn run_pacman(&self, args: &[&str]) -> Result<(), PackageError> {
        if self.dry_run {
            let prefix = if self.use_sudo { "sudo " } else { "" };
            println!("[dry-run] {}pacman {}", prefix, args.join(" "));
            return Ok(());
        }
        let mut cmd = if self.use_sudo {
            let mut sudo = Command::new("sudo");
            sudo.arg("pacman");
            sudo
        } else {
            Command::new("pacman")
        };
        let output = cmd.args(args).output()?;
        if output.status.success() {
            Ok(())
        } else {
            Err(PackageError::InstallFailed {
                package: args.join(" "),
                message: String::from_utf8_lossy(&output.stderr).to_string(),
            })
        }
    }

    /// Runs `sudo -S pacman …`, feeding `password` to sudo over stdin.
    fn run_pacman_with_password(&self, args: &[&str], password: &str) -> Result<(), PackageError> {
        if self.dry_run {
            println!("[dry-run] sudo pacman {}", args.join(" "));
            return Ok(());
        }
        let mut child = Command::new("sudo")
            .arg("-S")
            .arg("pacman")
            .args(args)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()?;
        // Best effort: sudo may not read stdin when credentials are cached,
        // so a failed write is intentionally ignored.
        if let Some(mut stdin) = child.stdin.take() {
            writeln!(stdin, "{}", password).ok();
        }
        let output = child.wait_with_output()?;
        if output.status.success() {
            Ok(())
        } else {
            Err(PackageError::InstallFailed {
                package: args.join(" "),
                message: String::from_utf8_lossy(&output.stderr).to_string(),
            })
        }
    }
}
impl PackageManager for Pacman {
    fn name(&self) -> &'static str {
        "pacman"
    }

    fn is_available(&self) -> bool {
        std::path::Path::new("/usr/bin/pacman").exists()
    }

    fn needs_sudo(&self) -> bool {
        self.use_sudo
    }

    /// Installs via `pacman -S --noconfirm <packages…>`; no-op when empty.
    fn install(&self, packages: &[String]) -> Result<(), PackageError> {
        if packages.is_empty() {
            return Ok(());
        }
        let mut args = vec!["-S", "--noconfirm"];
        args.extend(packages.iter().map(String::as_str));
        self.run_pacman(&args)
    }

    /// Installs with an explicit sudo password; no-op for an empty list.
    fn install_with_sudo(&self, packages: &[String], password: &str) -> Result<(), PackageError> {
        if packages.is_empty() {
            return Ok(());
        }
        let mut args = vec!["-S", "--noconfirm"];
        args.extend(packages.iter().map(String::as_str));
        self.run_pacman_with_password(&args, password)
    }

    /// Removes via `pacman -R --noconfirm <packages…>`; no-op when empty.
    fn uninstall(&self, packages: &[String]) -> Result<(), PackageError> {
        if packages.is_empty() {
            return Ok(());
        }
        let mut args = vec!["-R", "--noconfirm"];
        args.extend(packages.iter().map(String::as_str));
        self.run_pacman(&args)
    }

    /// `pacman -Q <pkg>` succeeds exactly when the package is installed.
    fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
        let status = Command::new("pacman").args(["-Q", package]).output()?.status;
        Ok(status.success())
    }

    fn update(&self) -> Result<(), PackageError> {
        self.run_pacman(&["-Sy"])
    }

    fn upgrade(&self) -> Result<(), PackageError> {
        self.run_pacman(&["-Syu", "--noconfirm"])
    }
}
impl Default for Pacman {
    /// Equivalent to [`Pacman::new`]: sudo enabled, dry-run disabled.
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -0,0 +1,107 @@
use super::{PackageError, PackageManager};
use std::process::Command;
/// The `yay` AUR helper (wraps pacman and handles sudo itself).
pub struct Yay {
    /// When true, commands are printed instead of executed.
    dry_run: bool,
}
impl Yay {
    /// Creates a yay wrapper with dry-run disabled.
    pub fn new() -> Self {
        Self { dry_run: false }
    }

    /// Enables or disables dry-run mode (builder style).
    pub fn dry_run(mut self, dry_run: bool) -> Self {
        self.dry_run = dry_run;
        self
    }

    /// Runs `yay` with `args`, honoring dry-run mode.
    fn run_yay(&self, args: &[&str]) -> Result<(), PackageError> {
        if self.dry_run {
            println!("[dry-run] yay {}", args.join(" "));
            return Ok(());
        }
        let output = Command::new("yay").args(args).output()?;
        if output.status.success() {
            Ok(())
        } else {
            Err(PackageError::InstallFailed {
                package: args.join(" "),
                message: String::from_utf8_lossy(&output.stderr).to_string(),
            })
        }
    }
}
impl PackageManager for Yay {
    fn name(&self) -> &'static str {
        "yay"
    }

    /// Probes availability by invoking `yay --version`.
    fn is_available(&self) -> bool {
        Command::new("yay")
            .arg("--version")
            .output()
            .map(|o| o.status.success())
            .unwrap_or(false)
    }

    /// yay escalates privileges itself, so doot never wraps it in sudo.
    fn needs_sudo(&self) -> bool {
        false
    }

    /// Installs via `yay -S --noconfirm <packages…>`; no-op when empty.
    fn install(&self, packages: &[String]) -> Result<(), PackageError> {
        if packages.is_empty() {
            return Ok(());
        }
        let mut args = vec!["-S", "--noconfirm"];
        args.extend(packages.iter().map(String::as_str));
        self.run_yay(&args)
    }

    /// yay handles sudo internally, so the password is ignored.
    fn install_with_sudo(&self, packages: &[String], _password: &str) -> Result<(), PackageError> {
        self.install(packages)
    }

    /// Removes via `yay -R --noconfirm <packages…>`; no-op when empty.
    fn uninstall(&self, packages: &[String]) -> Result<(), PackageError> {
        if packages.is_empty() {
            return Ok(());
        }
        let mut args = vec!["-R", "--noconfirm"];
        args.extend(packages.iter().map(String::as_str));
        self.run_yay(&args)
    }

    /// `yay -Q <pkg>` succeeds exactly when the package is installed.
    fn is_installed(&self, package: &str) -> Result<bool, PackageError> {
        let status = Command::new("yay").args(["-Q", package]).output()?.status;
        Ok(status.success())
    }

    fn update(&self) -> Result<(), PackageError> {
        self.run_yay(&["-Sy"])
    }

    fn upgrade(&self) -> Result<(), PackageError> {
        self.run_yay(&["-Syu", "--noconfirm"])
    }
}
impl Default for Yay {
    /// Equivalent to [`Yay::new`]: dry-run disabled.
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -0,0 +1,7 @@
//! State persistence and snapshots.
pub mod snapshot;
pub mod store;
pub use snapshot::Snapshot;
pub use store::{DeployMode, DeploymentRecord, StateStore, SyncStatus};

View file

@ -0,0 +1,87 @@
//! State snapshots for rollback.
use super::store::{State, StateError};
use std::path::Path;
/// A named snapshot of doot state.
pub struct Snapshot {
    /// Snapshot name; also used as the on-disk file stem (`<name>.json`).
    pub name: String,
    /// Unix-seconds creation time as a string. Empty when the snapshot was
    /// loaded from disk, because only `state` is serialized.
    pub created_at: String,
    /// The captured state.
    pub state: State,
}
impl Snapshot {
    /// Creates a snapshot of `state` and immediately persists it under
    /// `snapshot_dir` as `<name>.json`.
    ///
    /// # Errors
    /// Propagates directory-creation, serialization, and write failures.
    pub fn create(name: &str, state: &State, snapshot_dir: &Path) -> Result<Self, StateError> {
        let snapshot = Self {
            name: name.to_string(),
            // Move the timestamp straight into the struct (the previous code
            // cloned it redundantly).
            created_at: chrono_now(),
            state: state.clone(),
        };
        snapshot.save(snapshot_dir)?;
        Ok(snapshot)
    }

    /// Loads a snapshot by name from `snapshot_dir`.
    ///
    /// `created_at` is left empty because the timestamp is not serialized —
    /// only the state payload is written to disk.
    ///
    /// # Errors
    /// Fails when the file is missing, unreadable, or not valid JSON.
    pub fn load(name: &str, snapshot_dir: &Path) -> Result<Self, StateError> {
        let path = snapshot_dir.join(format!("{}.json", name));
        let content = std::fs::read_to_string(&path)?;
        let state: State = serde_json::from_str(&content)?;
        Ok(Self {
            name: name.to_string(),
            created_at: String::new(),
            state,
        })
    }

    /// Saves the snapshot's state to `<snapshot_dir>/<name>.json`, creating
    /// the directory when needed.
    ///
    /// # Errors
    /// Propagates directory-creation, serialization, and write failures.
    pub fn save(&self, snapshot_dir: &Path) -> Result<(), StateError> {
        std::fs::create_dir_all(snapshot_dir)?;
        let path = snapshot_dir.join(format!("{}.json", self.name));
        let json = serde_json::to_string_pretty(&self.state)?;
        std::fs::write(path, json)?;
        Ok(())
    }

    /// Lists all snapshot names (sorted) found in `snapshot_dir`.
    ///
    /// A missing directory yields an empty list rather than an error.
    pub fn list(snapshot_dir: &Path) -> Result<Vec<String>, StateError> {
        if !snapshot_dir.exists() {
            return Ok(Vec::new());
        }
        let mut snapshots = Vec::new();
        for entry in std::fs::read_dir(snapshot_dir)? {
            let path = entry?.path();
            if path.extension().map(|e| e == "json").unwrap_or(false) {
                if let Some(name) = path.file_stem() {
                    snapshots.push(name.to_string_lossy().to_string());
                }
            }
        }
        snapshots.sort();
        Ok(snapshots)
    }

    /// Deletes a snapshot by name; a snapshot that does not exist is not an
    /// error.
    ///
    /// Matches on `NotFound` from `remove_file` instead of the racy
    /// `exists()`-then-remove pattern, so concurrent deletion cannot fail.
    pub fn delete(name: &str, snapshot_dir: &Path) -> Result<(), StateError> {
        let path = snapshot_dir.join(format!("{}.json", name));
        match std::fs::remove_file(path) {
            Ok(()) => Ok(()),
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
            Err(e) => Err(e.into()),
        }
    }
}
/// Returns the current Unix timestamp in whole seconds, as a decimal string.
///
/// Panics only if the system clock reads earlier than the Unix epoch.
fn chrono_now() -> String {
    use std::time::{SystemTime, UNIX_EPOCH};
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs()
        .to_string()
}

View file

@ -0,0 +1,367 @@
//! State persistence for doot.
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use thiserror::Error;
/// State storage errors.
#[derive(Error, Debug)]
pub enum StateError {
    /// Filesystem read/write failure.
    #[error("io error: {0}")]
    IoError(#[from] std::io::Error),
    /// JSON (de)serialization failure.
    #[error("serialization error: {0}")]
    SerializationError(#[from] serde_json::Error),
}
/// Persistent doot state.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct State {
    /// State schema version.
    pub version: u32,
    /// Deployment records keyed by the target path's display string.
    pub deployments: HashMap<String, DeploymentRecord>,
    /// Installed-package records keyed by package name.
    pub packages: HashMap<String, PackageRecord>,
    /// Names of recorded snapshots, in creation order.
    pub snapshots: Vec<String>,
}
/// Deploy mode for a file.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Default)]
pub enum DeployMode {
    /// Copy the source into place (the default).
    #[default]
    Copy,
    /// Symlink the target to the source.
    Link,
}
/// Record of a deployed file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeploymentRecord {
    /// Source path inside the dotfiles repo.
    pub source: PathBuf,
    /// Deployed target path on the system.
    pub target: PathBuf,
    /// blake3 hex digest of the source at deploy time ("" for legacy records).
    pub source_hash: String,
    /// blake3 hex digest of the target at deploy time ("" for legacy records).
    pub target_hash: String,
    /// Unix-seconds timestamp string of the deployment.
    pub deployed_at: String,
    /// How the file was deployed.
    pub mode: DeployMode,
}
/// Sync status after comparing current hashes with recorded state.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum SyncStatus {
    /// Neither side changed since deployment.
    Synced,
    /// Only the source changed (needs re-deploy).
    SourceChanged,
    /// Only the target changed (local edits).
    TargetChanged,
    /// Both sides changed since deployment.
    Conflict,
    /// No record exists for this target.
    NotDeployed,
    /// The recorded target no longer exists.
    TargetMissing,
    /// The recorded source no longer exists.
    SourceMissing,
}
/// Record of an installed package.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PackageRecord {
    /// Package name.
    pub name: String,
    /// Name of the package manager that installed it.
    pub manager: String,
    /// Unix-seconds timestamp string of the installation.
    pub installed_at: String,
}
/// Manages doot state persistence.
pub struct StateStore {
    /// On-disk location of the JSON state file.
    path: PathBuf,
    /// In-memory state, flushed by `save`.
    state: State,
    /// True when in-memory state differs from disk.
    dirty: bool,
}
impl StateStore {
    /// Loads state from `path`, falling back to an empty state when the file
    /// is missing, unreadable, or corrupt (best-effort recovery by design).
    pub fn new(path: &Path) -> Self {
        let state = if path.exists() {
            std::fs::read_to_string(path)
                .ok()
                .and_then(|s| serde_json::from_str(&s).ok())
                .unwrap_or_default()
        } else {
            State::default()
        };
        Self {
            path: path.to_path_buf(),
            state,
            dirty: false,
        }
    }

    /// Records a deployment, capturing current hashes of both source and
    /// target so later sync checks can tell which side changed.
    pub fn record_deployment(&mut self, source: &Path, target: &Path, mode: DeployMode) {
        let record = DeploymentRecord {
            source: source.to_path_buf(),
            target: target.to_path_buf(),
            source_hash: hash_path(source),
            target_hash: hash_path(target),
            deployed_at: chrono_now(),
            mode,
        };
        self.state
            .deployments
            .insert(target.display().to_string(), record);
        self.dirty = true;
    }

    /// Checks sync status by comparing current hashes with recorded state.
    pub fn check_sync_status(&self, source: &Path, target: &Path) -> SyncStatus {
        let Some(record) = self.get_deployment(target) else {
            return SyncStatus::NotDeployed;
        };
        if !source.exists() {
            return SyncStatus::SourceMissing;
        }
        if !target.exists() {
            return SyncStatus::TargetMissing;
        }
        // Hash once up front; both the legacy branch and the normal
        // comparison below need the current digests.
        let current_source_hash = hash_path(source);
        let current_target_hash = hash_path(target);
        // Legacy records predate hash tracking: fall back to comparing the
        // two sides directly, and treat any mismatch as a source change so
        // the file gets re-deployed.
        if record.source_hash.is_empty() || record.target_hash.is_empty() {
            return if current_source_hash == current_target_hash {
                SyncStatus::Synced
            } else {
                SyncStatus::SourceChanged
            };
        }
        let source_changed = current_source_hash != record.source_hash;
        let target_changed = current_target_hash != record.target_hash;
        match (source_changed, target_changed) {
            (false, false) => SyncStatus::Synced,
            (true, false) => SyncStatus::SourceChanged,
            (false, true) => SyncStatus::TargetChanged,
            (true, true) => SyncStatus::Conflict,
        }
    }

    /// Records a package installation under `name`.
    pub fn record_package(&mut self, name: &str, manager: &str) {
        let record = PackageRecord {
            name: name.to_string(),
            manager: manager.to_string(),
            installed_at: chrono_now(),
        };
        self.state.packages.insert(name.to_string(), record);
        self.dirty = true;
    }

    /// Gets a deployment record by target path.
    pub fn get_deployment(&self, target: &Path) -> Option<&DeploymentRecord> {
        self.state.deployments.get(&target.display().to_string())
    }

    /// Returns all deployment records, keyed by target path string.
    pub fn get_all_deployments(&self) -> &HashMap<String, DeploymentRecord> {
        &self.state.deployments
    }

    /// Removes a deployment record by target path.
    pub fn remove_deployment(&mut self, target: &Path) {
        self.state.deployments.remove(&target.display().to_string());
        self.dirty = true;
    }

    /// Records a snapshot name.
    pub fn add_snapshot(&mut self, name: &str) {
        self.state.snapshots.push(name.to_string());
        self.dirty = true;
    }

    /// Returns all snapshot names in creation order.
    pub fn get_snapshots(&self) -> &[String] {
        &self.state.snapshots
    }

    /// Saves state to disk if dirty; a clean store writes nothing.
    ///
    /// # Errors
    /// Propagates directory-creation, serialization, and write failures.
    pub fn save(&mut self) -> Result<(), StateError> {
        if !self.dirty {
            return Ok(());
        }
        if let Some(parent) = self.path.parent() {
            std::fs::create_dir_all(parent)?;
        }
        let json = serde_json::to_string_pretty(&self.state)?;
        std::fs::write(&self.path, json)?;
        self.dirty = false;
        Ok(())
    }

    /// Checks if a target has a deployment record.
    pub fn is_deployed(&self, target: &Path) -> bool {
        self.state
            .deployments
            .contains_key(&target.display().to_string())
    }

    /// Returns true when the source side needs re-deployment (changed,
    /// conflicting, or never deployed).
    pub fn has_changed(&self, source: &Path, target: &Path) -> bool {
        matches!(
            self.check_sync_status(source, target),
            SyncStatus::SourceChanged | SyncStatus::Conflict | SyncStatus::NotDeployed
        )
    }

    /// Records a directory deployment by tracking each contained file
    /// individually under its corresponding target path.
    pub fn record_directory_deployment(
        &mut self,
        source_dir: &Path,
        target_dir: &Path,
        mode: DeployMode,
    ) {
        let mut files = Vec::new();
        collect_files(source_dir, &mut files);
        for source_file in files {
            if let Ok(relative) = source_file.strip_prefix(source_dir) {
                let target_file = target_dir.join(relative);
                self.record_deployment(&source_file, &target_file, mode);
            }
        }
    }

    /// Returns files that have changed in a directory.
    /// Returns (source_path, target_path, status) for each changed file.
    pub fn get_changed_files_in_dir(
        &self,
        source_dir: &Path,
        target_dir: &Path,
    ) -> Vec<(PathBuf, PathBuf, SyncStatus)> {
        let mut changed = Vec::new();
        // Pass 1: every file currently present in the source directory.
        let mut source_files = Vec::new();
        collect_files(source_dir, &mut source_files);
        for source_file in source_files {
            if let Ok(relative) = source_file.strip_prefix(source_dir) {
                let target_file = target_dir.join(relative);
                let status = self.check_sync_status(&source_file, &target_file);
                if status != SyncStatus::Synced {
                    changed.push((source_file, target_file, status));
                }
            }
        }
        // Pass 2: files that exist in target but not in source (deleted from
        // source).
        let mut target_files = Vec::new();
        if target_dir.exists() {
            collect_files(target_dir, &mut target_files);
        }
        for target_file in target_files {
            if let Ok(relative) = target_file.strip_prefix(target_dir) {
                let source_file = source_dir.join(relative);
                if !source_file.exists() {
                    // Only mark as SourceMissing if we previously tracked this
                    // file; files that were never in source (e.g. runtime
                    // artifacts like fish_variables) are ignored.
                    if self.get_deployment(&target_file).is_some() {
                        changed.push((source_file, target_file, SyncStatus::SourceMissing));
                    }
                }
            }
        }
        changed
    }

    /// Removes all deployment records for files within `target_dir`.
    ///
    /// Uses component-wise `Path::starts_with` so a sibling such as
    /// `…/dir2/file` is NOT treated as a child of `…/dir` — the previous
    /// string-prefix comparison removed such siblings by mistake.
    pub fn remove_directory_deployment(&mut self, target_dir: &Path) {
        self.state
            .deployments
            .retain(|key, _| !Path::new(key).starts_with(target_dir));
        self.dirty = true;
    }
}
/// Hashes a file or directory with blake3, returning the hex digest.
///
/// Missing paths, non-regular files, and unreadable files all yield an empty
/// string (best effort, never errors).
fn hash_path(path: &Path) -> String {
    if !path.exists() {
        return String::new();
    }
    if path.is_dir() {
        return hash_directory(path);
    }
    if path.is_file() {
        return std::fs::read(path)
            .map(|bytes| blake3::hash(&bytes).to_hex().to_string())
            .unwrap_or_default();
    }
    String::new()
}
/// Hashes a directory tree with blake3, returning the hex digest.
///
/// Each file contributes its path relative to `dir` (so renames change the
/// digest) followed by its contents; files are visited in sorted order to
/// keep the result independent of filesystem iteration order.
fn hash_directory(dir: &Path) -> String {
    let mut paths = Vec::new();
    collect_files(dir, &mut paths);
    // Deterministic ordering regardless of how the OS lists entries.
    paths.sort();
    let mut hasher = blake3::Hasher::new();
    for file_path in paths {
        if let Ok(rel) = file_path.strip_prefix(dir) {
            hasher.update(rel.to_string_lossy().as_bytes());
        }
        // Unreadable files are skipped silently (best effort).
        if let Ok(bytes) = std::fs::read(&file_path) {
            hasher.update(&bytes);
        }
    }
    hasher.finalize().to_hex().to_string()
}
/// Recursively appends every regular file under `dir` to `files`.
///
/// Unreadable directories and failed entries are skipped silently (best
/// effort). NOTE(review): symlinked directories are followed, so a symlink
/// cycle would recurse forever — confirm sources cannot contain cycles.
fn collect_files(dir: &Path, files: &mut Vec<PathBuf>) {
    let Ok(entries) = std::fs::read_dir(dir) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.is_dir() {
            collect_files(&path, files);
        } else if path.is_file() {
            files.push(path);
        }
    }
}
/// Returns the current Unix timestamp in whole seconds, as a decimal string.
///
/// Panics only if the system clock reads earlier than the Unix epoch.
fn chrono_now() -> String {
    use std::time::{SystemTime, UNIX_EPOCH};
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs()
        .to_string()
}

View file

@ -0,0 +1,30 @@
[package]
name = "doot-lang"
version.workspace = true
edition.workspace = true
[dependencies]
chumsky.workspace = true
ariadne.workspace = true
serde.workspace = true
serde_json.workspace = true
toml.workspace = true
smol.workspace = true
async-fs.workspace = true
async-net.workspace = true
surf.workspace = true
rayon.workspace = true
walkdir.workspace = true
dirs.workspace = true
blake3.workspace = true
os_info.workspace = true
thiserror.workspace = true
anyhow.workspace = true
indexmap = "2"
glob = "0.3"
hostname = "0.4"
age = "0.10"
ordered-float = "5"
[dev-dependencies]
tempfile = "3"

323
crates/doot-lang/src/ast.rs Normal file
View file

@ -0,0 +1,323 @@
//! Abstract syntax tree definitions for the doot language.
use crate::lexer::Span;
use std::collections::HashMap;
/// Identifier type alias.
pub type Ident = String;

/// A parsed doot program.
#[derive(Clone, Debug, PartialEq)]
pub struct Program {
    /// Top-level statements in source order.
    pub statements: Vec<Spanned<Statement>>,
}

/// Wraps a node with source location information.
#[derive(Clone, Debug, PartialEq)]
pub struct Spanned<T> {
    /// The wrapped AST node.
    pub node: T,
    /// Location of the node in the original source.
    pub span: Span,
}

impl<T> Spanned<T> {
    /// Creates a new spanned node.
    pub fn new(node: T, span: Span) -> Self {
        Self { node, span }
    }
}
/// Top-level statement types.
#[derive(Clone, Debug, PartialEq)]
pub enum Statement {
    /// Variable binding.
    VarDecl(VarDecl),
    /// Function definition.
    FnDecl(FnDecl),
    /// Struct type definition.
    StructDecl(StructDecl),
    /// Enum type definition.
    EnumDecl(EnumDecl),
    /// Type alias definition.
    TypeAlias(TypeAlias),
    /// Module import.
    Import(Import),
    /// Dotfile deployment declaration.
    Dotfile(Dotfile),
    /// Package installation declaration.
    Package(Package),
    /// Encrypted secret declaration.
    Secret(Secret),
    /// Lifecycle hook declaration.
    Hook(Hook),
    /// Macro definition.
    MacroDecl(MacroDecl),
    /// Macro invocation.
    MacroCall(MacroCall),
    /// `for` loop.
    ForLoop(ForLoop),
    /// Conditional statement.
    If(IfStatement),
    /// Pattern-match statement.
    Match(MatchStatement),
    /// Bare expression statement.
    Expr(Expr),
    /// Return, optionally carrying a value.
    Return(Option<Expr>),
}
/// Variable declaration.
#[derive(Clone, Debug, PartialEq)]
pub struct VarDecl {
    pub name: Ident,
    /// Optional explicit type annotation.
    pub ty: Option<TypeAnnotation>,
    /// Initializer expression.
    pub value: Expr,
}

/// Function declaration.
#[derive(Clone, Debug, PartialEq)]
pub struct FnDecl {
    pub name: Ident,
    /// Whether the function is asynchronous.
    pub is_async: bool,
    pub params: Vec<FnParam>,
    /// Optional declared return type.
    pub return_type: Option<TypeAnnotation>,
    pub body: Vec<Spanned<Statement>>,
}

/// Function parameter.
#[derive(Clone, Debug, PartialEq)]
pub struct FnParam {
    pub name: Ident,
    pub ty: TypeAnnotation,
    /// Optional default value.
    pub default: Option<Expr>,
}

/// Struct type declaration.
#[derive(Clone, Debug, PartialEq)]
pub struct StructDecl {
    pub name: Ident,
    pub fields: Vec<StructField>,
    /// Methods declared inside the struct body.
    pub methods: Vec<FnDecl>,
}

/// Struct field definition.
#[derive(Clone, Debug, PartialEq)]
pub struct StructField {
    pub name: Ident,
    pub ty: TypeAnnotation,
    /// Optional default value.
    pub default: Option<Expr>,
}

/// Enum type declaration.
#[derive(Clone, Debug, PartialEq)]
pub struct EnumDecl {
    pub name: Ident,
    pub variants: Vec<EnumVariant>,
}

/// Enum variant definition.
#[derive(Clone, Debug, PartialEq)]
pub struct EnumVariant {
    pub name: Ident,
    /// Payload types; `None` for a unit variant.
    pub fields: Option<Vec<TypeAnnotation>>,
}

/// Type alias declaration.
#[derive(Clone, Debug, PartialEq)]
pub struct TypeAlias {
    pub name: Ident,
    pub ty: TypeAnnotation,
}

/// Module import statement.
#[derive(Clone, Debug, PartialEq)]
pub struct Import {
    /// Module path as written in source.
    pub path: String,
    /// Optional local alias for the imported module.
    pub alias: Option<Ident>,
}

/// Deploy mode for dotfiles.
#[derive(Clone, Copy, Debug, PartialEq, Default)]
pub enum DeployMode {
    /// Copy the file into place (the default).
    #[default]
    Copy,
    /// Link the file into place.
    Link,
}

/// Permission rule - either a single mode or pattern-based.
#[derive(Clone, Debug, PartialEq)]
pub enum PermissionRule {
    /// One Unix mode applied to the whole deployment.
    Single(u32),
    /// Mode applied only to files matching `pattern`.
    Pattern { pattern: String, mode: u32 },
}

/// Dotfile deployment declaration.
#[derive(Clone, Debug, PartialEq)]
pub struct Dotfile {
    /// Source path expression.
    pub source: Expr,
    /// Destination path expression.
    pub target: Expr,
    /// Optional condition gating the deployment.
    pub when: Option<Expr>,
    /// Whether to run the file through templating before deploying.
    pub template: Option<bool>,
    /// Permission rules applied to deployed files.
    pub permissions: Vec<PermissionRule>,
    pub owner: Option<String>,
    /// Copy vs. link strategy for the whole declaration.
    pub deploy: DeployMode,
    /// Patterns selecting files to link — presumably overrides `deploy`
    /// per matching file; confirm against the deploy logic.
    pub link_patterns: Vec<String>,
    /// Patterns selecting files to copy.
    pub copy_patterns: Vec<String>,
}
/// Package installation declaration.
#[derive(Clone, Debug, PartialEq)]
pub struct Package {
    /// Default spec — presumably the fallback when no manager-specific
    /// entry is given; confirm in the evaluator.
    pub default: Option<Expr>,
    /// Homebrew-specific spec.
    pub brew: Option<PackageSpec>,
    /// APT-specific spec.
    pub apt: Option<PackageSpec>,
    /// Pacman-specific spec.
    pub pacman: Option<PackageSpec>,
    /// Yay (AUR helper)-specific spec.
    pub yay: Option<PackageSpec>,
    /// Optional condition gating installation.
    pub when: Option<Expr>,
}

/// Package manager-specific specification.
#[derive(Clone, Debug, PartialEq)]
pub struct PackageSpec {
    pub name: Expr,
    /// Homebrew cask flag.
    pub cask: Option<bool>,
    /// Homebrew tap to install from.
    pub tap: Option<String>,
}

/// Encrypted secret file declaration.
#[derive(Clone, Debug, PartialEq)]
pub struct Secret {
    /// Encrypted source file expression.
    pub source: Expr,
    /// Decrypted destination path expression.
    pub target: Expr,
    /// Optional Unix mode bits for the decrypted file.
    pub mode: Option<u32>,
}

/// Lifecycle hook declaration.
#[derive(Clone, Debug, PartialEq)]
pub struct Hook {
    /// When in the lifecycle the hook fires.
    pub stage: HookStage,
    /// Command/expression to run.
    pub run: Expr,
    /// Optional condition gating the hook.
    pub when: Option<Expr>,
}

/// Hook execution stage.
#[derive(Clone, Debug, PartialEq)]
pub enum HookStage {
    /// Before dotfiles are deployed.
    BeforeDeploy,
    /// After dotfiles are deployed.
    AfterDeploy,
    /// Before packages are installed.
    BeforePackage,
    /// After packages are installed.
    AfterPackage,
}

/// Macro definition.
#[derive(Clone, Debug, PartialEq)]
pub struct MacroDecl {
    pub name: Ident,
    /// Parameter names (untyped).
    pub params: Vec<Ident>,
    pub body: Vec<Spanned<Statement>>,
}

/// Macro invocation.
#[derive(Clone, Debug, PartialEq)]
pub struct MacroCall {
    pub name: Ident,
    pub args: Vec<Expr>,
}

/// For loop statement.
#[derive(Clone, Debug, PartialEq)]
pub struct ForLoop {
    /// Loop variable bound on each iteration.
    pub var: Ident,
    /// Expression producing the iterable.
    pub iter: Expr,
    pub body: Vec<Spanned<Statement>>,
}

/// Conditional statement.
#[derive(Clone, Debug, PartialEq)]
pub struct IfStatement {
    pub condition: Expr,
    pub then_body: Vec<Spanned<Statement>>,
    /// Optional `else` branch.
    pub else_body: Option<Vec<Spanned<Statement>>>,
}

/// Pattern matching statement.
#[derive(Clone, Debug, PartialEq)]
pub struct MatchStatement {
    /// Scrutinee expression.
    pub expr: Expr,
    pub arms: Vec<MatchArm>,
}

/// Single arm in a match statement.
#[derive(Clone, Debug, PartialEq)]
pub struct MatchArm {
    pub pattern: Pattern,
    /// Expression evaluated when the pattern matches.
    pub body: Expr,
}

/// Match pattern types.
#[derive(Clone, Debug, PartialEq)]
pub enum Pattern {
    /// Matches an exact literal value.
    Literal(Literal),
    /// Identifier pattern.
    Ident(Ident),
    /// Matches a specific enum variant.
    EnumVariant { ty: Ident, variant: Ident },
    /// Matches anything.
    Wildcard,
}
/// Expression types.
#[derive(Clone, Debug, PartialEq)]
pub enum Expr {
    /// Literal value.
    Literal(Literal),
    /// Variable reference.
    Ident(Ident),
    /// Path expression built from two sub-expressions.
    Path(Box<Expr>, Box<Expr>),
    /// Binary operation: lhs, operator, rhs.
    Binary(Box<Expr>, BinOp, Box<Expr>),
    /// Unary operation.
    Unary(UnaryOp, Box<Expr>),
    /// Function call: callee, arguments.
    Call(Box<Expr>, Vec<Expr>),
    /// Method call: receiver, method name, arguments.
    MethodCall(Box<Expr>, Ident, Vec<Expr>),
    /// Indexing: container, index.
    Index(Box<Expr>, Box<Expr>),
    /// Field access: object, field name.
    Field(Box<Expr>, Ident),
    /// Enum variant reference: type name, variant name.
    EnumVariant(Ident, Ident),
    /// Struct literal: type name, field initializers.
    StructInit(Ident, HashMap<Ident, Expr>),
    /// List literal.
    List(Vec<Expr>),
    /// Conditional expression: condition, then, optional else.
    If(Box<Expr>, Box<Expr>, Option<Box<Expr>>),
    /// Anonymous function: parameters, body expression.
    Lambda(Vec<FnParam>, Box<Expr>),
    /// Await of an async expression.
    Await(Box<Expr>),
    /// String with embedded expressions.
    Interpolated(Vec<InterpolatedPart>),
    /// Home-relative path expression.
    HomePath(Box<Expr>),
}

/// Part of an interpolated string.
#[derive(Clone, Debug, PartialEq)]
pub enum InterpolatedPart {
    /// Verbatim text segment.
    Literal(String),
    /// Embedded expression segment.
    Expr(Expr),
}

/// Literal value types.
#[derive(Clone, Debug, PartialEq)]
pub enum Literal {
    Int(i64),
    Float(f64),
    Str(String),
    Bool(bool),
    /// The absent/none value.
    None,
}

/// Binary operators.
#[derive(Clone, Debug, PartialEq)]
pub enum BinOp {
    Add,
    Sub,
    Mul,
    Div,
    Mod,
    Eq,
    NotEq,
    Lt,
    Gt,
    LtEq,
    GtEq,
    And,
    Or,
    /// Joins two path operands.
    PathJoin,
    /// Null-coalescing: left operand unless it is none.
    NullCoalesce,
}

/// Unary operators.
#[derive(Clone, Debug, PartialEq)]
pub enum UnaryOp {
    /// Arithmetic negation.
    Neg,
    /// Logical not.
    Not,
}

/// Type annotation in source code.
#[derive(Clone, Debug, PartialEq)]
pub enum TypeAnnotation {
    /// A bare named type.
    Simple(Ident),
    /// List of an element type.
    List(Box<TypeAnnotation>),
    /// Optional (nullable) type.
    Optional(Box<TypeAnnotation>),
    /// Function type: parameter types and return type.
    Function(Vec<TypeAnnotation>, Box<TypeAnnotation>),
    /// Union of several alternatives.
    Union(Vec<TypeAnnotation>),
    /// Literal type (a specific literal value used as a type).
    Literal(Literal),
}

View file

@ -0,0 +1,199 @@
use crate::evaluator::{EvalError, Value};
/// Collects the already-evaluated arguments into a list.
///
/// NOTE(review): despite the name, arguments arrive here already
/// resolved to `Value`s, so this is a plain pass-through rather than a
/// concurrent await of futures — confirm against the evaluator's
/// builtin-call path.
pub fn all(args: &[Value]) -> Result<Value, EvalError> {
    Ok(Value::List(args.to_vec()))
}

/// Returns the first already-evaluated argument, or none when empty.
///
/// NOTE(review): like `all`, this does not race futures — it simply
/// picks the first argument.
pub fn race(args: &[Value]) -> Result<Value, EvalError> {
    Ok(args.first().cloned().unwrap_or(Value::None))
}
/// HTTP GET; returns the response body as a string.
///
/// Blocks the calling thread (`smol::block_on`) until the request
/// completes; transport failures become `EvalError::AsyncError`.
/// NOTE(review): the HTTP status is never checked — a 404 body is
/// returned as success; confirm this is intended.
pub fn fetch(args: &[Value]) -> Result<Value, EvalError> {
    let url = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("fetch expects a URL string".to_string())),
    };
    smol::block_on(async {
        let mut response = surf::get(url)
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        let body = response
            .body_string()
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        Ok(Value::Str(body))
    })
}

/// HTTP GET; parses the response body as JSON and converts it to an
/// interpreter value (objects become "object" structs — see
/// `json_to_value`). Parse failures become `EvalError::AsyncError`.
pub fn fetch_json(args: &[Value]) -> Result<Value, EvalError> {
    let url = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("fetch_json expects a URL string".to_string())),
    };
    smol::block_on(async {
        let mut response = surf::get(url)
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        let json: serde_json::Value = response
            .body_json()
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        Ok(json_to_value(&json))
    })
}

/// HTTP GET; returns the raw response body as a list of byte values —
/// one `Int` per byte, which is memory-heavy for large downloads.
pub fn fetch_bytes(args: &[Value]) -> Result<Value, EvalError> {
    let url = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("fetch_bytes expects a URL string".to_string())),
    };
    smol::block_on(async {
        let mut response = surf::get(url)
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        let bytes = response
            .body_bytes()
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        let values: Vec<Value> = bytes.iter().map(|b| Value::Int(*b as i64)).collect();
        Ok(Value::List(values))
    })
}
/// HTTP POST with a plain string body (empty when the second argument
/// is missing or not a string); returns the response body as a string.
/// The HTTP status is not checked.
pub fn post(args: &[Value]) -> Result<Value, EvalError> {
    let url = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("post expects a URL string".to_string())),
    };
    let body = match args.get(1) {
        Some(Value::Str(s)) => s.clone(),
        _ => String::new(),
    };
    smol::block_on(async {
        let mut response = surf::post(url)
            .body(body)
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        let result = response
            .body_string()
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        Ok(Value::Str(result))
    })
}

/// HTTP POST of the second argument serialized as JSON (missing →
/// null); the response body is parsed as JSON and converted back to an
/// interpreter value.
pub fn post_json(args: &[Value]) -> Result<Value, EvalError> {
    let url = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("post_json expects a URL string".to_string())),
    };
    let data = args.get(1).unwrap_or(&Value::None);
    let json = value_to_json(data);
    smol::block_on(async {
        let mut response = surf::post(url)
            .body_json(&json)
            .map_err(|e| EvalError::AsyncError(e.to_string()))?
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        let result: serde_json::Value = response
            .body_json()
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        Ok(json_to_value(&result))
    })
}

/// Downloads a URL to a local file. The whole body is buffered in
/// memory before being written; returns true on success.
pub fn download(args: &[Value]) -> Result<Value, EvalError> {
    let url = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("download expects a URL string".to_string())),
    };
    let path = match args.get(1) {
        Some(Value::Path(p)) => p.clone(),
        Some(Value::Str(s)) => std::path::PathBuf::from(s),
        _ => return Err(EvalError::TypeError("download requires destination path".to_string())),
    };
    smol::block_on(async {
        let mut response = surf::get(url)
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        let bytes = response
            .body_bytes()
            .await
            .map_err(|e| EvalError::AsyncError(e.to_string()))?;
        std::fs::write(&path, bytes)?;
        Ok(Value::Bool(true))
    })
}
fn json_to_value(json: &serde_json::Value) -> Value {
match json {
serde_json::Value::Null => Value::None,
serde_json::Value::Bool(b) => Value::Bool(*b),
serde_json::Value::Number(n) => {
if let Some(i) = n.as_i64() {
Value::Int(i)
} else if let Some(f) = n.as_f64() {
Value::Float(f)
} else {
Value::None
}
}
serde_json::Value::String(s) => Value::Str(s.clone()),
serde_json::Value::Array(arr) => {
Value::List(arr.iter().map(json_to_value).collect())
}
serde_json::Value::Object(obj) => {
let fields: indexmap::IndexMap<String, Value> = obj
.iter()
.map(|(k, v)| (k.clone(), json_to_value(v)))
.collect();
Value::Struct("object".to_string(), fields)
}
}
}
fn value_to_json(val: &Value) -> serde_json::Value {
match val {
Value::Int(n) => serde_json::Value::Number(serde_json::Number::from(*n)),
Value::Float(n) => serde_json::Number::from_f64(*n)
.map(serde_json::Value::Number)
.unwrap_or(serde_json::Value::Null),
Value::Str(s) => serde_json::Value::String(s.clone()),
Value::Bool(b) => serde_json::Value::Bool(*b),
Value::Path(p) => serde_json::Value::String(p.display().to_string()),
Value::List(items) => {
serde_json::Value::Array(items.iter().map(value_to_json).collect())
}
Value::Struct(_, fields) => {
let map: serde_json::Map<String, serde_json::Value> = fields
.iter()
.map(|(k, v)| (k.clone(), value_to_json(v)))
.collect();
serde_json::Value::Object(map)
}
Value::None => serde_json::Value::Null,
_ => serde_json::Value::Null,
}
}

View file

@ -0,0 +1,349 @@
use crate::ast::Expr;
use crate::evaluator::{EvalError, Evaluator, Value};
/// Applies a lambda or named function to every element of a list and
/// returns the list of results; callback errors propagate.
pub fn map(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
    let items = match args.first() {
        Some(Value::List(items)) => items.clone(),
        Some(other) => {
            return Err(EvalError::TypeError(format!("map expects list, got {}", other.type_name())))
        }
        None => return Err(EvalError::TypeError("map requires a list argument".to_string())),
    };
    let mut mapped = Vec::with_capacity(items.len());
    match args.get(1) {
        Some(Value::Lambda(params, body, env)) => {
            for element in items {
                // Fresh scope per element; the lambda's first parameter
                // is bound to the current item.
                let mut scope = env.clone();
                scope.push_scope();
                if let Some(param) = params.first() {
                    scope.define(param.name.clone(), element);
                }
                mapped.push(eval.eval_in_env(body, scope)?);
            }
        }
        Some(Value::Function(func, func_env)) => {
            for element in items {
                mapped.push(eval.call_fn(func, func_env, &[element])?);
            }
        }
        _ => return Err(EvalError::TypeError("map requires a function".to_string())),
    }
    Ok(Value::List(mapped))
}
/// Keeps only the list elements for which the callback returns a
/// truthy value; callback errors propagate.
pub fn filter(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
    let items = match args.first() {
        Some(Value::List(items)) => items.clone(),
        Some(other) => {
            return Err(EvalError::TypeError(format!("filter expects list, got {}", other.type_name())))
        }
        None => return Err(EvalError::TypeError("filter requires a list argument".to_string())),
    };
    let mut kept = Vec::new();
    match args.get(1) {
        Some(Value::Lambda(params, body, env)) => {
            for element in items {
                let mut scope = env.clone();
                scope.push_scope();
                if let Some(param) = params.first() {
                    scope.define(param.name.clone(), element.clone());
                }
                if eval.eval_in_env(body, scope)?.is_truthy() {
                    kept.push(element);
                }
            }
        }
        Some(Value::Function(func, func_env)) => {
            for element in items {
                if eval.call_fn(func, func_env, &[element.clone()])?.is_truthy() {
                    kept.push(element);
                }
            }
        }
        _ => return Err(EvalError::TypeError("filter requires a function".to_string())),
    }
    Ok(Value::List(kept))
}
/// Reduces a list to a single value: the callback receives
/// (accumulator, element) and its result becomes the next accumulator.
/// The second argument is the initial accumulator (defaults to none).
pub fn fold(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
    let items = match args.first() {
        Some(Value::List(items)) => items.clone(),
        Some(other) => {
            return Err(EvalError::TypeError(format!("fold expects list, got {}", other.type_name())))
        }
        None => return Err(EvalError::TypeError("fold requires a list argument".to_string())),
    };
    let mut acc = args.get(1).cloned().unwrap_or(Value::None);
    match args.get(2) {
        Some(Value::Lambda(params, body, env)) => {
            for element in items {
                let mut scope = env.clone();
                scope.push_scope();
                // First lambda parameter is the accumulator, second the
                // current element.
                if let Some(acc_param) = params.first() {
                    scope.define(acc_param.name.clone(), acc.clone());
                }
                if let Some(item_param) = params.get(1) {
                    scope.define(item_param.name.clone(), element);
                }
                acc = eval.eval_in_env(body, scope)?;
            }
            Ok(acc)
        }
        Some(Value::Function(func, func_env)) => {
            for element in items {
                acc = eval.call_fn(func, func_env, &[acc, element])?;
            }
            Ok(acc)
        }
        _ => Err(EvalError::TypeError("fold requires a function".to_string())),
    }
}
/// Flattens one level of nesting: inner lists are spliced in place and
/// non-list elements are kept as-is.
pub fn flatten(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::List(items)) = args.first() else {
        return Err(EvalError::TypeError("flatten expects a list".to_string()));
    };
    let mut flat = Vec::new();
    for element in items {
        if let Value::List(inner) = element {
            flat.extend(inner.iter().cloned());
        } else {
            flat.push(element.clone());
        }
    }
    Ok(Value::List(flat))
}
/// Concatenates all arguments into one list: list arguments contribute
/// their elements, scalar arguments contribute themselves.
pub fn concat(args: &[Value]) -> Result<Value, EvalError> {
    let mut combined = Vec::new();
    for value in args {
        if let Value::List(items) = value {
            combined.extend(items.iter().cloned());
        } else {
            combined.push(value.clone());
        }
    }
    Ok(Value::List(combined))
}
/// Zips two or more lists into a list of tuples (represented as
/// lists), truncating to the shortest input.
pub fn zip(args: &[Value]) -> Result<Value, EvalError> {
    if args.len() < 2 {
        return Err(EvalError::TypeError("zip requires at least 2 lists".to_string()));
    }
    let mut lists = Vec::with_capacity(args.len());
    for arg in args {
        match arg {
            Value::List(items) => lists.push(items),
            _ => return Err(EvalError::TypeError("zip expects lists".to_string())),
        }
    }
    let shortest = lists.iter().map(|l| l.len()).min().unwrap_or(0);
    let zipped: Vec<Value> = (0..shortest)
        .map(|i| Value::List(lists.iter().map(|l| l[i].clone()).collect()))
        .collect();
    Ok(Value::List(zipped))
}
/// Pairs each element with its zero-based index:
/// `[[0, x0], [1, x1], …]`.
pub fn enumerate(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::List(items)) = args.first() else {
        return Err(EvalError::TypeError("enumerate expects a list".to_string()));
    };
    let mut indexed = Vec::with_capacity(items.len());
    for (i, element) in items.iter().enumerate() {
        indexed.push(Value::List(vec![Value::Int(i as i64), element.clone()]));
    }
    Ok(Value::List(indexed))
}

/// First element of a list, or none when empty.
pub fn first(args: &[Value]) -> Result<Value, EvalError> {
    if let Some(Value::List(items)) = args.first() {
        Ok(items.first().cloned().unwrap_or(Value::None))
    } else {
        Err(EvalError::TypeError("first expects a list".to_string()))
    }
}

/// Last element of a list, or none when empty.
pub fn last(args: &[Value]) -> Result<Value, EvalError> {
    if let Some(Value::List(items)) = args.first() {
        Ok(items.last().cloned().unwrap_or(Value::None))
    } else {
        Err(EvalError::TypeError("last expects a list".to_string()))
    }
}

/// Element count of a list, or byte length of a string.
pub fn len(args: &[Value]) -> Result<Value, EvalError> {
    match args.first() {
        Some(Value::List(items)) => Ok(Value::Int(items.len() as i64)),
        Some(Value::Str(s)) => Ok(Value::Int(s.len() as i64)),
        _ => Err(EvalError::TypeError("len expects a list or string".to_string())),
    }
}
/// True when the list has an element equal (per `values_equal`) to the
/// second argument; a missing needle compares as none.
pub fn contains(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::List(items)) = args.first() else {
        return Err(EvalError::TypeError("contains expects a list".to_string()));
    };
    let needle = args.get(1).unwrap_or(&Value::None);
    let found = items.iter().any(|candidate| values_equal(candidate, needle));
    Ok(Value::Bool(found))
}

/// Removes duplicates, keeping the first occurrence of each value in
/// order. O(n²): `values_equal` equality is not hashable, so each
/// element is linearly checked against those already kept.
pub fn unique(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::List(items)) = args.first() else {
        return Err(EvalError::TypeError("unique expects a list".to_string()));
    };
    let mut distinct: Vec<Value> = Vec::new();
    for element in items {
        if !distinct.iter().any(|kept| values_equal(kept, element)) {
            distinct.push(element.clone());
        }
    }
    Ok(Value::List(distinct))
}
/// Sorts a list ascending.
///
/// Ints and floats compare numerically (ints exactly, mixed int/float
/// via f64 with `total_cmp`); the previous zero-padded string keys
/// ordered negative numbers backwards (`-5` before `-10`). Strings
/// compare lexicographically and any other value falls back to its
/// string representation; numbers sort before non-numeric values so
/// the comparator stays a total order (required by `sort_by`).
pub fn sort(args: &[Value]) -> Result<Value, EvalError> {
    use std::cmp::Ordering;
    let mut items = match args.first() {
        Some(Value::List(items)) => items.clone(),
        _ => return Err(EvalError::TypeError("sort expects a list".to_string())),
    };
    // Numeric view of an element, when it has one.
    fn as_num(v: &Value) -> Option<f64> {
        match v {
            Value::Int(n) => Some(*n as f64),
            Value::Float(n) => Some(*n),
            _ => None,
        }
    }
    // Fallback textual key for non-numeric values.
    fn text_key(v: &Value) -> String {
        match v {
            Value::Str(s) => s.clone(),
            other => other.to_string_repr(),
        }
    }
    items.sort_by(|a, b| match (a, b) {
        // Exact integer comparison avoids f64 precision loss on big i64s.
        (Value::Int(x), Value::Int(y)) => x.cmp(y),
        _ => match (as_num(a), as_num(b)) {
            // total_cmp gives NaN a fixed position, keeping totality.
            (Some(x), Some(y)) => x.total_cmp(&y),
            (Some(_), None) => Ordering::Less,
            (None, Some(_)) => Ordering::Greater,
            (None, None) => text_key(a).cmp(&text_key(b)),
        },
    });
    Ok(Value::List(items))
}
/// Sorts a list by a lambda-computed key; keys are compared by their
/// string representation.
pub fn sort_by(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
    let items = match args.first() {
        Some(Value::List(items)) => items.clone(),
        _ => return Err(EvalError::TypeError("sort_by expects a list".to_string())),
    };
    let Some(Value::Lambda(params, body, env)) = args.get(1) else {
        return Err(EvalError::TypeError("sort_by requires a function".to_string()));
    };
    let mut keyed = Vec::with_capacity(items.len());
    for element in items {
        let mut scope = env.clone();
        scope.push_scope();
        if let Some(param) = params.first() {
            scope.define(param.name.clone(), element.clone());
        }
        let key = eval.eval_in_env(body, scope)?;
        keyed.push((element, key.to_string_repr()));
    }
    keyed.sort_by(|left, right| left.1.cmp(&right.1));
    Ok(Value::List(keyed.into_iter().map(|(value, _)| value).collect()))
}

/// Returns the list with element order reversed.
pub fn reverse(args: &[Value]) -> Result<Value, EvalError> {
    match args.first() {
        Some(Value::List(items)) => Ok(Value::List(items.iter().rev().cloned().collect())),
        _ => Err(EvalError::TypeError("reverse expects a list".to_string())),
    }
}
/// Applies a lambda to each element in order, returning the results.
///
/// NOTE(review): unlike `map`, only a `Value::Lambda` callback is
/// accepted — a named `Value::Function` is rejected here; confirm
/// whether that asymmetry is intentional.
pub fn seq(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
    let list = match args.first() {
        Some(Value::List(items)) => items.clone(),
        _ => return Err(EvalError::TypeError("seq expects a list".to_string())),
    };
    match args.get(1) {
        Some(Value::Lambda(params, body, env)) => {
            let mut results = Vec::new();
            for item in list {
                // Fresh scope per element; the lambda's first parameter
                // is bound to the current item.
                let mut local_env = env.clone();
                local_env.push_scope();
                if let Some(param) = params.first() {
                    local_env.define(param.name.clone(), item);
                }
                let result = eval.eval_in_env(body, local_env)?;
                results.push(result);
            }
            Ok(Value::List(results))
        }
        _ => Err(EvalError::TypeError("seq requires a function".to_string())),
    }
}
/// Applies a lambda to each element, walking the list in chunks of the
/// given batch size; results are returned as one flat list.
///
/// NOTE(review): the chunking currently has no observable effect —
/// elements are still evaluated one at a time, sequentially; presumably
/// a placeholder for per-batch concurrency.
///
/// # Errors
/// Rejects a missing list/lambda, and a batch size below 1 — a size of
/// 0 previously panicked inside `slice::chunks`, and a negative size
/// silently wrapped to a huge `usize`.
pub fn batch(eval: &mut Evaluator, args: &[Value], _arg_exprs: &[Expr]) -> Result<Value, EvalError> {
    let list = match args.first() {
        Some(Value::List(items)) => items.clone(),
        _ => return Err(EvalError::TypeError("batch expects a list".to_string())),
    };
    let batch_size = match args.get(1) {
        Some(Value::Int(n)) if *n >= 1 => *n as usize,
        Some(Value::Int(_)) => {
            return Err(EvalError::TypeError("batch size must be at least 1".to_string()))
        }
        _ => return Err(EvalError::TypeError("batch requires batch size".to_string())),
    };
    match args.get(2) {
        Some(Value::Lambda(params, body, env)) => {
            let mut results = Vec::with_capacity(list.len());
            for chunk in list.chunks(batch_size) {
                for item in chunk {
                    let mut local_env = env.clone();
                    local_env.push_scope();
                    if let Some(param) = params.first() {
                        local_env.define(param.name.clone(), item.clone());
                    }
                    results.push(eval.eval_in_env(body, local_env)?);
                }
            }
            Ok(Value::List(results))
        }
        _ => Err(EvalError::TypeError("batch requires a function".to_string())),
    }
}
/// Structural equality used by `contains` and `unique`.
///
/// Floats compare with an epsilon tolerance (note: this makes float
/// "equality" non-transitive for values straddling the epsilon).
/// Values of differing kinds, and kinds not listed here (lists,
/// structs, paths, …), are never equal.
fn values_equal(a: &Value, b: &Value) -> bool {
    match (a, b) {
        (Value::Int(x), Value::Int(y)) => x == y,
        (Value::Float(x), Value::Float(y)) => (x - y).abs() < f64::EPSILON,
        (Value::Str(x), Value::Str(y)) => x == y,
        (Value::Bool(x), Value::Bool(y)) => x == y,
        (Value::None, Value::None) => true,
        (Value::Enum(t1, v1), Value::Enum(t2, v2)) => t1 == t2 && v1 == v2,
        _ => false,
    }
}

View file

@ -0,0 +1,165 @@
use crate::evaluator::{EvalError, Value};
use std::path::PathBuf;
/// BLAKE3 hex digest of a file's contents; accepts a path or a string.
pub fn hash_file(args: &[Value]) -> Result<Value, EvalError> {
    let path = match args.first() {
        Some(Value::Path(p)) => p.clone(),
        Some(Value::Str(s)) => PathBuf::from(s),
        _ => return Err(EvalError::TypeError("hash_file expects a path".to_string())),
    };
    let bytes = std::fs::read(&path)?;
    Ok(Value::Str(blake3::hash(&bytes).to_hex().to_string()))
}

/// BLAKE3 hex digest of a string's UTF-8 bytes.
pub fn hash_str(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(text)) = args.first() else {
        return Err(EvalError::TypeError("hash_str expects a string".to_string()));
    };
    Ok(Value::Str(blake3::hash(text.as_bytes()).to_hex().to_string()))
}
/// Encrypts `content` to an age X25519 recipient and returns the
/// ciphertext as base64.
///
/// # Errors
/// Returns `EvalError::TypeError` when an argument is missing or of
/// the wrong kind, the recipient key fails to parse, or an encryption
/// step fails. The previous `.expect` on encryptor construction (a
/// panic path) now reports an error instead.
pub fn encrypt_age(args: &[Value]) -> Result<Value, EvalError> {
    let content = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("encrypt_age expects content string".to_string())),
    };
    let recipient = match args.get(1) {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("encrypt_age requires recipient public key".to_string())),
    };
    let recipient = recipient
        .parse::<age::x25519::Recipient>()
        .map_err(|e| EvalError::TypeError(format!("invalid recipient: {}", e)))?;
    // `with_recipients` yields `None` only for an empty recipient list,
    // which cannot happen here; still fail gracefully rather than panic.
    let encryptor = age::Encryptor::with_recipients(vec![Box::new(recipient)])
        .ok_or_else(|| EvalError::TypeError("failed to create encryptor".to_string()))?;
    let mut encrypted = vec![];
    let mut writer = encryptor
        .wrap_output(&mut encrypted)
        .map_err(|e| EvalError::TypeError(format!("encryption error: {}", e)))?;
    use std::io::Write;
    writer
        .write_all(content.as_bytes())
        .map_err(|e| EvalError::TypeError(format!("encryption error: {}", e)))?;
    writer
        .finish()
        .map_err(|e| EvalError::TypeError(format!("encryption error: {}", e)))?;
    Ok(Value::Str(base64_encode(&encrypted)))
}
/// Decrypts base64-encoded age ciphertext using an X25519 identity
/// (secret key) given in its string encoding.
///
/// Only recipient-encrypted payloads are accepted — a passphrase-based
/// decryptor is rejected — and the plaintext must be valid UTF-8. All
/// failures are reported as `EvalError::TypeError` with a
/// stage-specific message.
pub fn decrypt_age(args: &[Value]) -> Result<Value, EvalError> {
    let encrypted = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("decrypt_age expects encrypted string".to_string())),
    };
    let identity_str = match args.get(1) {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("decrypt_age requires identity".to_string())),
    };
    let identity = identity_str
        .parse::<age::x25519::Identity>()
        .map_err(|e| EvalError::TypeError(format!("invalid identity: {}", e)))?;
    let encrypted_bytes = base64_decode(encrypted)
        .map_err(|e| EvalError::TypeError(format!("invalid base64: {}", e)))?;
    let decryptor = match age::Decryptor::new(&encrypted_bytes[..])
        .map_err(|e| EvalError::TypeError(format!("decryption error: {}", e)))?
    {
        age::Decryptor::Recipients(d) => d,
        _ => return Err(EvalError::TypeError("unexpected decryptor type".to_string())),
    };
    let mut decrypted = vec![];
    let mut reader = decryptor
        .decrypt(std::iter::once(&identity as &dyn age::Identity))
        .map_err(|e| EvalError::TypeError(format!("decryption error: {}", e)))?;
    use std::io::Read;
    reader
        .read_to_end(&mut decrypted)
        .map_err(|e| EvalError::TypeError(format!("decryption error: {}", e)))?;
    Ok(Value::Str(
        String::from_utf8(decrypted)
            .map_err(|e| EvalError::TypeError(format!("invalid UTF-8: {}", e)))?,
    ))
}
/// Encodes bytes as standard (RFC 4648) base64 with `=` padding.
fn base64_encode(data: &[u8]) -> String {
    const ALPHABET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    // 4 output chars per 3 input bytes, rounded up.
    let mut encoded = String::with_capacity(data.len().div_ceil(3) * 4);
    for group in data.chunks(3) {
        let a = group[0] as usize;
        let b = group.get(1).map_or(0, |&x| x as usize);
        let c = group.get(2).map_or(0, |&x| x as usize);
        encoded.push(ALPHABET[a >> 2] as char);
        encoded.push(ALPHABET[((a & 0x03) << 4) | (b >> 4)] as char);
        encoded.push(if group.len() > 1 {
            ALPHABET[((b & 0x0f) << 2) | (c >> 6)] as char
        } else {
            '='
        });
        encoded.push(if group.len() > 2 {
            ALPHABET[c & 0x3f] as char
        } else {
            '='
        });
    }
    encoded
}
/// Decodes standard base64; trailing `=` padding is optional,
/// whitespace is not accepted.
///
/// # Errors
/// Returns a message for any character outside the base64 alphabet,
/// and for truncated input whose final group is a single character —
/// previously such a group silently produced a garbage byte.
fn base64_decode(s: &str) -> Result<Vec<u8>, String> {
    // Maps ASCII byte -> 6-bit value; -1 marks characters outside the
    // alphabet. Built at compile time.
    const DECODE: [i8; 256] = {
        let mut table = [-1i8; 256];
        let alphabet = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
        let mut i = 0;
        while i < 64 {
            table[alphabet[i] as usize] = i as i8;
            i += 1;
        }
        table
    };
    let s = s.trim_end_matches('=');
    let mut result = Vec::with_capacity(s.len() * 3 / 4);
    for chunk in s.as_bytes().chunks(4) {
        // A final group of one character cannot encode any byte.
        if chunk.len() == 1 {
            return Err("invalid base64 length".to_string());
        }
        let mut buf = [0u8; 4];
        for (i, &c) in chunk.iter().enumerate() {
            let val = DECODE[c as usize];
            if val < 0 {
                return Err(format!("invalid base64 character: {}", c as char));
            }
            buf[i] = val as u8;
        }
        result.push((buf[0] << 2) | (buf[1] >> 4));
        if chunk.len() > 2 {
            result.push((buf[1] << 4) | (buf[2] >> 2));
        }
        if chunk.len() > 3 {
            result.push((buf[2] << 6) | buf[3]);
        }
    }
    Ok(result)
}

View file

@ -0,0 +1,398 @@
use crate::evaluator::{EvalError, Value};
use std::path::PathBuf;
use std::process::Command;
use walkdir::WalkDir;
/// Reads a file to a UTF-8 string. Accepts a path value or a
/// `~`-expandable string.
pub fn read_file(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    let content = std::fs::read_to_string(&path)?;
    Ok(Value::Str(content))
}

/// Reads a file and returns its lines as a list of strings (line
/// terminators stripped).
pub fn read_file_lines(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    let content = std::fs::read_to_string(&path)?;
    let lines: Vec<Value> = content.lines().map(|l| Value::Str(l.to_string())).collect();
    Ok(Value::List(lines))
}

/// Writes a string to a file, creating or truncating it.
pub fn write_file(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    let content = match args.get(1) {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("write_file requires content string".to_string())),
    };
    std::fs::write(&path, content)?;
    Ok(Value::Bool(true))
}

/// Copies a file from the first path to the second (contents and
/// permission bits, per `std::fs::copy`).
pub fn copy_file(args: &[Value]) -> Result<Value, EvalError> {
    let src = get_path(args)?;
    let dst = match args.get(1) {
        Some(Value::Path(p)) => p.clone(),
        Some(Value::Str(s)) => expand_path(s),
        _ => return Err(EvalError::TypeError("copy_file requires destination path".to_string())),
    };
    std::fs::copy(&src, &dst)?;
    Ok(Value::Bool(true))
}

/// Deletes a regular file; errors if it does not exist.
pub fn delete_file(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    std::fs::remove_file(&path)?;
    Ok(Value::Bool(true))
}

/// True when the path exists and is a regular file (follows symlinks).
pub fn file_exists(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    Ok(Value::Bool(path.is_file()))
}

/// True when the path exists and is a directory (follows symlinks).
pub fn dir_exists(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    Ok(Value::Bool(path.is_dir()))
}

/// Creates a directory and all missing parents; succeeds if it already
/// exists.
pub fn create_dir_all(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    std::fs::create_dir_all(&path)?;
    Ok(Value::Bool(true))
}

/// Lists the immediate entries of a directory as paths (unsorted;
/// unreadable entries are skipped).
pub fn list_dir(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    let entries: Vec<Value> = std::fs::read_dir(&path)?
        .filter_map(|e| e.ok())
        .map(|e| Value::Path(e.path()))
        .collect();
    Ok(Value::List(entries))
}
/// Expands a glob pattern into the list of matching paths; entries
/// that fail to read are skipped.
pub fn glob_files(args: &[Value]) -> Result<Value, EvalError> {
    let pattern = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("glob expects a pattern string".to_string())),
    };
    let entries: Vec<Value> = glob::glob(pattern)
        .map_err(|e| EvalError::TypeError(e.to_string()))?
        .filter_map(|e| e.ok())
        .map(Value::Path)
        .collect();
    Ok(Value::List(entries))
}

/// Recursively walks a directory, returning every entry — including
/// the root and subdirectories themselves, not just files.
pub fn walk_dir(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    let entries: Vec<Value> = WalkDir::new(&path)
        .into_iter()
        .filter_map(|e| e.ok())
        .map(|e| Value::Path(e.path().to_path_buf()))
        .collect();
    Ok(Value::List(entries))
}

/// Returns the system temporary directory.
pub fn temp_dir() -> Result<Value, EvalError> {
    Ok(Value::Path(std::env::temp_dir()))
}

/// Builds a fresh path inside the temp directory:
/// `<prefix>_<timestamp-hex><suffix>` — prefix defaults to "doot",
/// suffix to "". The file itself is NOT created.
pub fn temp_file(args: &[Value]) -> Result<Value, EvalError> {
    let prefix = match args.first() {
        Some(Value::Str(s)) => s.as_str(),
        _ => "doot",
    };
    let suffix = match args.get(1) {
        Some(Value::Str(s)) => s.as_str(),
        _ => "",
    };
    let path = std::env::temp_dir().join(format!("{}_{}{}", prefix, uuid_simple(), suffix));
    Ok(Value::Path(path))
}
/// True when the path itself is a symlink (does not follow it).
pub fn is_symlink(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    Ok(Value::Bool(path.is_symlink()))
}

/// Returns the target a symlink points to.
pub fn read_link(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    let target = std::fs::read_link(&path)?;
    Ok(Value::Path(target))
}

/// Joins any number of path/string segments into one path. Note that
/// an absolute segment replaces everything accumulated so far
/// (`PathBuf::push` semantics).
pub fn path_join(args: &[Value]) -> Result<Value, EvalError> {
    let mut result = PathBuf::new();
    for arg in args {
        match arg {
            Value::Path(p) => result.push(p),
            Value::Str(s) => result.push(s),
            _ => return Err(EvalError::TypeError("path_join expects paths or strings".to_string())),
        }
    }
    Ok(Value::Path(result))
}

/// Parent directory of a path; an empty path when there is none.
pub fn path_parent(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    Ok(Value::Path(path.parent().map(|p| p.to_path_buf()).unwrap_or_default()))
}

/// Final component of a path as a string ("" when there is none).
pub fn path_filename(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    Ok(Value::Str(
        path.file_name()
            .map(|s| s.to_string_lossy().to_string())
            .unwrap_or_default(),
    ))
}

/// File extension without the leading dot ("" when there is none).
pub fn path_extension(args: &[Value]) -> Result<Value, EvalError> {
    let path = get_path(args)?;
    Ok(Value::Str(
        path.extension()
            .map(|s| s.to_string_lossy().to_string())
            .unwrap_or_default(),
    ))
}
/// The user's home directory; an empty path if it cannot be determined.
pub fn home() -> Result<Value, EvalError> {
    Ok(Value::Path(dirs::home_dir().unwrap_or_default()))
}

/// The platform configuration directory.
pub fn config_dir() -> Result<Value, EvalError> {
    Ok(Value::Path(dirs::config_dir().unwrap_or_default()))
}

/// The configuration directory for a named application:
/// `<config_dir>/<app>`.
pub fn config_path(args: &[Value]) -> Result<Value, EvalError> {
    let app = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("config_path expects an app name string".to_string())),
    };
    let config = dirs::config_dir().unwrap_or_default();
    Ok(Value::Path(config.join(app)))
}

/// The platform data directory.
pub fn data_dir() -> Result<Value, EvalError> {
    Ok(Value::Path(dirs::data_dir().unwrap_or_default()))
}

/// The platform cache directory.
pub fn cache_dir() -> Result<Value, EvalError> {
    Ok(Value::Path(dirs::cache_dir().unwrap_or_default()))
}
/// Runs a command through `sh -c` and returns its captured stdout.
///
/// NOTE(review): stderr is discarded and a non-zero exit status is not
/// reported — callers that need the status should use
/// `exec_with_status`. Requires a POSIX `sh` on PATH.
pub fn exec(args: &[Value]) -> Result<Value, EvalError> {
    let cmd = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("exec expects a command string".to_string())),
    };
    let output = Command::new("sh")
        .arg("-c")
        .arg(cmd)
        .output()?;
    Ok(Value::Str(String::from_utf8_lossy(&output.stdout).to_string()))
}

/// Runs a command through `sh -c`, inheriting stdio, and returns its
/// exit code (-1 when no code is available, e.g. killed by a signal).
pub fn exec_with_status(args: &[Value]) -> Result<Value, EvalError> {
    let cmd = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("exec_with_status expects a command string".to_string())),
    };
    let status = Command::new("sh")
        .arg("-c")
        .arg(cmd)
        .status()?;
    Ok(Value::Int(status.code().unwrap_or(-1) as i64))
}

/// Alias for `exec`.
pub fn shell(args: &[Value]) -> Result<Value, EvalError> {
    exec(args)
}

/// Locates a command via the external `which` binary; returns the
/// resolved path, or none when not found. Relies on `which` being
/// present (Unix-like systems).
pub fn which(args: &[Value]) -> Result<Value, EvalError> {
    let cmd = match args.first() {
        Some(Value::Str(s)) => s,
        _ => return Err(EvalError::TypeError("which expects a command name".to_string())),
    };
    let output = Command::new("which")
        .arg(cmd)
        .output()?;
    if output.status.success() {
        let path = String::from_utf8_lossy(&output.stdout).trim().to_string();
        Ok(Value::Path(PathBuf::from(path)))
    } else {
        Ok(Value::None)
    }
}
/// Serializes a value to a compact JSON string.
///
/// A missing argument serializes as `null`.
pub fn to_json(args: &[Value]) -> Result<Value, EvalError> {
    let val = args.first().unwrap_or(&Value::None);
    Ok(Value::Str(value_to_json(val).to_string()))
}
/// Parses a JSON string into an interpreter value.
///
/// Errors with `TypeError` on a non-string argument or malformed JSON.
pub fn from_json(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(s)) = args.first() else {
        return Err(EvalError::TypeError("from_json expects a string".to_string()));
    };
    serde_json::from_str::<serde_json::Value>(s)
        .map(|json| json_to_value(&json))
        .map_err(|e| EvalError::TypeError(format!("invalid JSON: {}", e)))
}
pub fn to_toml(args: &[Value]) -> Result<Value, EvalError> {
let val = args.first().unwrap_or(&Value::None);
let toml_val = value_to_toml(val);
let s = toml::to_string(&toml_val)
.map_err(|e| EvalError::TypeError(format!("TOML serialization error: {}", e)))?;
Ok(Value::Str(s))
}
/// Parses a TOML string into an interpreter value.
///
/// Errors with `TypeError` on a non-string argument or malformed TOML.
pub fn from_toml(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(s)) = args.first() else {
        return Err(EvalError::TypeError("from_toml expects a string".to_string()));
    };
    let parsed = toml::from_str::<toml::Value>(s)
        .map_err(|e| EvalError::TypeError(format!("invalid TOML: {}", e)))?;
    Ok(toml_to_value(&parsed))
}
/// Serializes a value to YAML.
///
/// NOTE(review): this actually emits pretty-printed JSON. JSON is a
/// subset of YAML 1.2, so the output is parseable as YAML, but it is not
/// block-style YAML — confirm whether callers expect idiomatic YAML
/// (would require a YAML serializer dependency).
pub fn to_yaml(args: &[Value]) -> Result<Value, EvalError> {
    let val = args.first().unwrap_or(&Value::None);
    let json = value_to_json(val);
    Ok(Value::Str(serde_json::to_string_pretty(&json).unwrap_or_default()))
}
/// Parses a YAML string into an interpreter value.
///
/// NOTE(review): this delegates to [`from_json`], so only the JSON
/// subset of YAML is accepted; block-style YAML (indentation, `-` lists,
/// unquoted keys) will fail with an "invalid JSON" error. Confirm
/// whether full YAML support is needed.
pub fn from_yaml(args: &[Value]) -> Result<Value, EvalError> {
    from_json(args)
}
/// Extracts a filesystem path from the first argument.
///
/// Path values are returned as-is; string values go through
/// [`expand_path`] so a leading `~` resolves to the home directory.
fn get_path(args: &[Value]) -> Result<PathBuf, EvalError> {
    match args.first() {
        Some(Value::Path(p)) => Ok(p.to_path_buf()),
        Some(Value::Str(s)) => Ok(expand_path(s)),
        _ => Err(EvalError::TypeError("expected path or string".to_string())),
    }
}
fn expand_path(s: &str) -> PathBuf {
if s.starts_with('~') {
let home = dirs::home_dir().unwrap_or_default();
home.join(s.strip_prefix("~/").unwrap_or(&s[1..]))
} else {
PathBuf::from(s)
}
}
/// Generates a time-based identifier string (hex-encoded nanoseconds
/// since the Unix epoch).
///
/// NOTE(review): despite the name this is not an RFC 4122 UUID, and two
/// calls within the same nanosecond would collide — confirm callers only
/// need best-effort uniqueness (e.g. temp-file names).
fn uuid_simple() -> String {
    use std::time::{SystemTime, UNIX_EPOCH};
    // A system clock set before the epoch previously panicked via
    // `.unwrap()`; degrade to a zero timestamp instead.
    let nanos = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap_or_default()
        .as_nanos();
    format!("{:x}", nanos)
}
fn value_to_json(val: &Value) -> serde_json::Value {
match val {
Value::Int(n) => serde_json::Value::Number(serde_json::Number::from(*n)),
Value::Float(n) => serde_json::Number::from_f64(*n)
.map(serde_json::Value::Number)
.unwrap_or(serde_json::Value::Null),
Value::Str(s) => serde_json::Value::String(s.clone()),
Value::Bool(b) => serde_json::Value::Bool(*b),
Value::Path(p) => serde_json::Value::String(p.display().to_string()),
Value::List(items) => {
serde_json::Value::Array(items.iter().map(value_to_json).collect())
}
Value::Struct(_, fields) => {
let map: serde_json::Map<String, serde_json::Value> = fields
.iter()
.map(|(k, v)| (k.clone(), value_to_json(v)))
.collect();
serde_json::Value::Object(map)
}
Value::None => serde_json::Value::Null,
_ => serde_json::Value::Null,
}
}
fn json_to_value(json: &serde_json::Value) -> Value {
match json {
serde_json::Value::Null => Value::None,
serde_json::Value::Bool(b) => Value::Bool(*b),
serde_json::Value::Number(n) => {
if let Some(i) = n.as_i64() {
Value::Int(i)
} else if let Some(f) = n.as_f64() {
Value::Float(f)
} else {
Value::None
}
}
serde_json::Value::String(s) => Value::Str(s.clone()),
serde_json::Value::Array(arr) => {
Value::List(arr.iter().map(json_to_value).collect())
}
serde_json::Value::Object(obj) => {
let fields: indexmap::IndexMap<String, Value> = obj
.iter()
.map(|(k, v)| (k.clone(), json_to_value(v)))
.collect();
Value::Struct("object".to_string(), fields)
}
}
}
fn value_to_toml(val: &Value) -> toml::Value {
match val {
Value::Int(n) => toml::Value::Integer(*n),
Value::Float(n) => toml::Value::Float(*n),
Value::Str(s) => toml::Value::String(s.clone()),
Value::Bool(b) => toml::Value::Boolean(*b),
Value::Path(p) => toml::Value::String(p.display().to_string()),
Value::List(items) => {
toml::Value::Array(items.iter().map(value_to_toml).collect())
}
Value::Struct(_, fields) => {
let map: toml::map::Map<String, toml::Value> = fields
.iter()
.map(|(k, v)| (k.clone(), value_to_toml(v)))
.collect();
toml::Value::Table(map)
}
_ => toml::Value::String(String::new()),
}
}
fn toml_to_value(toml: &toml::Value) -> Value {
match toml {
toml::Value::Boolean(b) => Value::Bool(*b),
toml::Value::Integer(i) => Value::Int(*i),
toml::Value::Float(f) => Value::Float(*f),
toml::Value::String(s) => Value::Str(s.clone()),
toml::Value::Array(arr) => {
Value::List(arr.iter().map(toml_to_value).collect())
}
toml::Value::Table(table) => {
let fields: indexmap::IndexMap<String, Value> = table
.iter()
.map(|(k, v)| (k.clone(), toml_to_value(v)))
.collect();
Value::Struct("table".to_string(), fields)
}
toml::Value::Datetime(dt) => Value::Str(dt.to_string()),
}
}

View file

@ -0,0 +1,346 @@
//! Built-in functions for the doot language.
pub mod async_ops;
pub mod collections;
pub mod crypto;
pub mod io;
pub mod strings;
use crate::ast::Expr;
use crate::evaluator::{EvalError, Evaluator, Value};
/// Dispatches a built-in function call.
///
/// * `args` — the already-evaluated argument values.
/// * `arg_exprs` — the original, unevaluated argument expressions; the
///   collection builtins that take callbacks (`map`, `filter`, `fold`,
///   `sort_by`, `seq`, `batch`) receive them alongside the evaluator.
///
/// Returns `EvalError::UndefinedFunction` for an unknown `name`.
pub fn call_builtin(
    eval: &mut Evaluator,
    name: &str,
    args: &[Value],
    arg_exprs: &[Expr],
) -> Result<Value, EvalError> {
    match name {
        // Collections
        "map" => collections::map(eval, args, arg_exprs),
        "filter" => collections::filter(eval, args, arg_exprs),
        "fold" => collections::fold(eval, args, arg_exprs),
        "flatten" => collections::flatten(args),
        "concat" => collections::concat(args),
        "zip" => collections::zip(args),
        "enumerate" => collections::enumerate(args),
        "first" => collections::first(args),
        "last" => collections::last(args),
        "len" => collections::len(args),
        "contains" => collections::contains(args),
        "unique" => collections::unique(args),
        "sort" => collections::sort(args),
        "sort_by" => collections::sort_by(eval, args, arg_exprs),
        "reverse" => collections::reverse(args),
        "seq" => collections::seq(eval, args, arg_exprs),
        "batch" => collections::batch(eval, args, arg_exprs),
        // Strings
        "join" => strings::join(args),
        "split" => strings::split(args),
        "upper" => strings::upper(args),
        "lower" => strings::lower(args),
        "trim" => strings::trim(args),
        "replace" => strings::replace(args),
        "starts_with" => strings::starts_with(args),
        "ends_with" => strings::ends_with(args),
        "format" => strings::format(args),
        // Options
        "unwrap" => options_unwrap(args),
        "unwrap_or" => options_unwrap_or(args),
        "is_some" => options_is_some(args),
        "is_none" => options_is_none(args),
        // I/O
        "read_file" => io::read_file(args),
        "read_file_lines" => io::read_file_lines(args),
        "write_file" => io::write_file(args),
        "copy_file" => io::copy_file(args),
        "delete_file" => io::delete_file(args),
        "file_exists" => io::file_exists(args),
        "dir_exists" => io::dir_exists(args),
        "create_dir_all" => io::create_dir_all(args),
        "list_dir" => io::list_dir(args),
        // Note: surface name "glob" maps to the io::glob_files helper.
        "glob" => io::glob_files(args),
        "walk_dir" => io::walk_dir(args),
        "temp_dir" => io::temp_dir(),
        "temp_file" => io::temp_file(args),
        "is_symlink" => io::is_symlink(args),
        "read_link" => io::read_link(args),
        // Paths
        "path_join" => io::path_join(args),
        "path_parent" => io::path_parent(args),
        "path_filename" => io::path_filename(args),
        "path_extension" => io::path_extension(args),
        "home" => io::home(),
        "config_dir" => io::config_dir(),
        "config_path" => io::config_path(args),
        "data_dir" => io::data_dir(),
        "cache_dir" => io::cache_dir(),
        // Process
        "exec" => io::exec(args),
        "exec_with_status" => io::exec_with_status(args),
        "shell" => io::shell(args),
        "which" => io::which(args),
        // Serialization
        "to_json" => io::to_json(args),
        "from_json" => io::from_json(args),
        "to_toml" => io::to_toml(args),
        "from_toml" => io::from_toml(args),
        "to_yaml" => io::to_yaml(args),
        "from_yaml" => io::from_yaml(args),
        // Crypto
        "hash_file" => crypto::hash_file(args),
        "hash_str" => crypto::hash_str(args),
        "encrypt_age" => crypto::encrypt_age(args),
        "decrypt_age" => crypto::decrypt_age(args),
        // Async
        "all" => async_ops::all(args),
        "race" => async_ops::race(args),
        // Network
        "fetch" => async_ops::fetch(args),
        "fetch_json" => async_ops::fetch_json(args),
        "fetch_bytes" => async_ops::fetch_bytes(args),
        "post" => async_ops::post(args),
        "post_json" => async_ops::post_json(args),
        "download" => async_ops::download(args),
        // Environment
        "env" => env_get(args),
        // Debug
        "print" => print_values(args),
        "println" => println_values(args),
        "dbg" => dbg_values(args),
        _ => Err(EvalError::UndefinedFunction(name.to_string())),
    }
}
/// Dispatches a method call on a value.
///
/// Built-in methods are resolved by the receiver's type. For structs the
/// lookup order is: (1) a method declared on the struct, (2) a
/// function-valued field with the method's name; otherwise
/// `FieldNotFound`. Unknown methods on other receivers yield
/// `UndefinedFunction`; unsupported receiver types yield `TypeError`.
pub fn call_method(
    eval: &mut Evaluator,
    obj: &Value,
    method: &str,
    args: &[Value],
    arg_exprs: &[Expr],
) -> Result<Value, EvalError> {
    match obj {
        Value::List(items) => match method {
            "len" => Ok(Value::Int(items.len() as i64)),
            "first" => Ok(items.first().cloned().unwrap_or(Value::None)),
            "last" => Ok(items.last().cloned().unwrap_or(Value::None)),
            // `contains` with no argument is false rather than an error.
            "contains" => {
                if let Some(needle) = args.first() {
                    Ok(Value::Bool(items.iter().any(|v| values_equal(v, needle))))
                } else {
                    Ok(Value::Bool(false))
                }
            }
            // The free-function builtins expect the receiver as the first
            // positional argument, so prepend `obj` before delegating.
            "map" => {
                let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>();
                collections::map(eval, &all_args, arg_exprs)
            }
            "filter" => {
                let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>();
                collections::filter(eval, &all_args, arg_exprs)
            }
            "fold" => {
                let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>();
                collections::fold(eval, &all_args, arg_exprs)
            }
            // Non-string separator arguments degrade to "".
            "join" => {
                let sep = args.first().map(|v| match v {
                    Value::Str(s) => s.as_str(),
                    _ => "",
                }).unwrap_or("");
                let result = items
                    .iter()
                    .map(|v| v.to_string_repr())
                    .collect::<Vec<_>>()
                    .join(sep);
                Ok(Value::Str(result))
            }
            "sort" => {
                let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>();
                collections::sort(&all_args)
            }
            "reverse" => {
                let mut reversed = items.clone();
                reversed.reverse();
                Ok(Value::List(reversed))
            }
            "unique" => {
                let all_args = std::iter::once(obj.clone()).chain(args.iter().cloned()).collect::<Vec<_>>();
                collections::unique(&all_args)
            }
            _ => Err(EvalError::UndefinedFunction(format!("list.{}", method))),
        },
        Value::Str(s) => match method {
            // NOTE(review): this is the byte length, not the character
            // count — confirm that is the intended semantics for scripts.
            "len" => Ok(Value::Int(s.len() as i64)),
            "upper" => Ok(Value::Str(s.to_uppercase())),
            "lower" => Ok(Value::Str(s.to_lowercase())),
            "trim" => Ok(Value::Str(s.trim().to_string())),
            // Default separator is a single space when absent/non-string.
            "split" => {
                let sep = args.first().map(|v| match v {
                    Value::Str(s) => s.as_str(),
                    _ => " ",
                }).unwrap_or(" ");
                let parts: Vec<Value> = s.split(sep).map(|p| Value::Str(p.to_string())).collect();
                Ok(Value::List(parts))
            }
            // With fewer than two string args, returns the receiver unchanged.
            "replace" => {
                if args.len() >= 2 {
                    if let (Value::Str(from), Value::Str(to)) = (&args[0], &args[1]) {
                        return Ok(Value::Str(s.replace(from, to)));
                    }
                }
                Ok(Value::Str(s.clone()))
            }
            "starts_with" => {
                if let Some(Value::Str(prefix)) = args.first() {
                    Ok(Value::Bool(s.starts_with(prefix)))
                } else {
                    Ok(Value::Bool(false))
                }
            }
            "ends_with" => {
                if let Some(Value::Str(suffix)) = args.first() {
                    Ok(Value::Bool(s.ends_with(suffix)))
                } else {
                    Ok(Value::Bool(false))
                }
            }
            "contains" => {
                if let Some(Value::Str(needle)) = args.first() {
                    Ok(Value::Bool(s.contains(needle)))
                } else {
                    Ok(Value::Bool(false))
                }
            }
            _ => Err(EvalError::UndefinedFunction(format!("str.{}", method))),
        },
        Value::Path(p) => match method {
            "parent" => Ok(Value::Path(p.parent().map(|p| p.to_path_buf()).unwrap_or_default())),
            "filename" => Ok(Value::Str(p.file_name().map(|s| s.to_string_lossy().to_string()).unwrap_or_default())),
            "extension" => Ok(Value::Str(p.extension().map(|s| s.to_string_lossy().to_string()).unwrap_or_default())),
            "exists" => Ok(Value::Bool(p.exists())),
            "is_file" => Ok(Value::Bool(p.is_file())),
            "is_dir" => Ok(Value::Bool(p.is_dir())),
            // Accepts a string or path argument; anything else is a no-op
            // that returns the receiver unchanged.
            "join" => {
                if let Some(Value::Str(other)) = args.first() {
                    Ok(Value::Path(p.join(other)))
                } else if let Some(Value::Path(other)) = args.first() {
                    Ok(Value::Path(p.join(other)))
                } else {
                    Ok(Value::Path(p.clone()))
                }
            }
            _ => Err(EvalError::UndefinedFunction(format!("path.{}", method))),
        },
        Value::Struct(name, fields) => {
            // Declared methods take priority; the receiver is passed as
            // the implicit first argument.
            if let Some(decl) = eval.env().get_struct(name).cloned() {
                for m in &decl.methods {
                    if m.name == method {
                        let mut method_args = vec![obj.clone()];
                        method_args.extend(args.iter().cloned());
                        let env_clone = eval.env().clone();
                        return eval.call_function(&m, &env_clone, &method_args);
                    }
                }
            }
            // Fallback: a field holding a function value is callable —
            // note it does NOT receive the struct as a first argument.
            if let Some(field) = fields.get(method) {
                if let Value::Function(func, env) = field {
                    return eval.call_function(func, env, args);
                }
            }
            Err(EvalError::FieldNotFound {
                ty: name.clone(),
                field: method.to_string(),
            })
        }
        _ => Err(EvalError::TypeError(format!(
            "cannot call method {} on {}",
            method,
            obj.type_name()
        ))),
    }
}
/// Structural equality between two interpreter values.
///
/// Values of different kinds are never equal. Floats compare within
/// `f64::EPSILON` to absorb rounding noise. `Path` and `List` arms were
/// previously missing, which made e.g. `contains` on a list of paths
/// always return false; they now compare structurally. Struct and
/// function values intentionally remain non-comparable (always false).
fn values_equal(a: &Value, b: &Value) -> bool {
    match (a, b) {
        (Value::Int(x), Value::Int(y)) => x == y,
        (Value::Float(x), Value::Float(y)) => (x - y).abs() < f64::EPSILON,
        (Value::Str(x), Value::Str(y)) => x == y,
        (Value::Bool(x), Value::Bool(y)) => x == y,
        (Value::Path(x), Value::Path(y)) => x == y,
        (Value::List(xs), Value::List(ys)) => {
            xs.len() == ys.len() && xs.iter().zip(ys).all(|(x, y)| values_equal(x, y))
        }
        (Value::None, Value::None) => true,
        (Value::Enum(t1, v1), Value::Enum(t2, v2)) => t1 == t2 && v1 == v2,
        _ => false,
    }
}
/// Returns the wrapped value, erroring when it is `none` or when no
/// argument was supplied.
fn options_unwrap(args: &[Value]) -> Result<Value, EvalError> {
    match args.first() {
        Some(v) if !matches!(v, Value::None) => Ok(v.clone()),
        Some(_) => Err(EvalError::TypeError("unwrap called on none".to_string())),
        None => Err(EvalError::TypeError("unwrap requires an argument".to_string())),
    }
}
/// Returns the first argument unless it is `none` (or missing), in which
/// case the second argument is returned (`none` when also absent).
fn options_unwrap_or(args: &[Value]) -> Result<Value, EvalError> {
    let fallback = args.get(1).cloned().unwrap_or(Value::None);
    match args.first() {
        Some(v) if !matches!(v, Value::None) => Ok(v.clone()),
        _ => Ok(fallback),
    }
}
/// True when an argument is present and is not `none`.
fn options_is_some(args: &[Value]) -> Result<Value, EvalError> {
    let absent = matches!(args.first(), Some(Value::None) | None);
    Ok(Value::Bool(!absent))
}
/// True when no argument is present or the argument is `none`.
fn options_is_none(args: &[Value]) -> Result<Value, EvalError> {
    let absent = matches!(args.first(), Some(Value::None) | None);
    Ok(Value::Bool(absent))
}
/// Reads an environment variable by name.
///
/// Returns `none` when the argument is not a string, the variable is
/// unset, or its value is not valid Unicode.
fn env_get(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(key)) = args.first() else {
        return Ok(Value::None);
    };
    match std::env::var(key) {
        Ok(v) => Ok(Value::Str(v)),
        Err(_) => Ok(Value::None),
    }
}
/// Prints the arguments' string representations to stdout, separated by
/// single spaces, without a trailing newline. Always returns `none`.
fn print_values(args: &[Value]) -> Result<Value, EvalError> {
    use std::io::Write;
    let output: Vec<String> = args.iter().map(|v| v.to_string_repr()).collect();
    print!("{}", output.join(" "));
    // `print!` does not flush line-buffered stdout, so a prompt-style
    // partial line would otherwise stay invisible; flush best-effort.
    let _ = std::io::stdout().flush();
    Ok(Value::None)
}
/// Prints the arguments' string representations to stdout, separated by
/// single spaces, followed by a newline. Always returns `none`.
fn println_values(args: &[Value]) -> Result<Value, EvalError> {
    let line = args
        .iter()
        .map(|v| v.to_string_repr())
        .collect::<Vec<_>>()
        .join(" ");
    println!("{}", line);
    Ok(Value::None)
}
/// Writes each argument's debug representation to stderr, one per line,
/// then returns the last argument (or `none`) so calls can be chained
/// inline around an expression.
fn dbg_values(args: &[Value]) -> Result<Value, EvalError> {
    args.iter()
        .enumerate()
        .for_each(|(i, arg)| eprintln!("[dbg {}] {:?}", i, arg));
    Ok(args.last().cloned().unwrap_or(Value::None))
}

View file

@ -0,0 +1,119 @@
use crate::evaluator::{EvalError, Value};
/// Joins a list's elements into one string.
///
/// The optional second argument is the separator (default: ""); a
/// non-string separator also degrades to "". Errors with `TypeError`
/// unless the first argument is a list.
pub fn join(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::List(items)) = args.first() else {
        return Err(EvalError::TypeError("join expects a list".to_string()));
    };
    let sep = if let Some(Value::Str(s)) = args.get(1) { s.as_str() } else { "" };
    let pieces: Vec<String> = items.iter().map(|v| v.to_string_repr()).collect();
    Ok(Value::Str(pieces.join(sep)))
}
/// Splits a string into a list of substrings.
///
/// The optional second argument is the separator (default: a single
/// space); a non-string separator also degrades to " ". Errors with
/// `TypeError` unless the first argument is a string.
pub fn split(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(s)) = args.first() else {
        return Err(EvalError::TypeError("split expects a string".to_string()));
    };
    let sep = if let Some(Value::Str(d)) = args.get(1) { d.as_str() } else { " " };
    let parts: Vec<Value> = s.split(sep).map(|p| Value::Str(p.to_string())).collect();
    Ok(Value::List(parts))
}
/// Returns the uppercase form of the string argument.
pub fn upper(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(s)) = args.first() else {
        return Err(EvalError::TypeError("upper expects a string".to_string()));
    };
    Ok(Value::Str(s.to_uppercase()))
}
/// Returns the lowercase form of the string argument.
pub fn lower(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(s)) = args.first() else {
        return Err(EvalError::TypeError("lower expects a string".to_string()));
    };
    Ok(Value::Str(s.to_lowercase()))
}
/// Returns the string argument with leading/trailing whitespace removed.
pub fn trim(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(s)) = args.first() else {
        return Err(EvalError::TypeError("trim expects a string".to_string()));
    };
    Ok(Value::Str(s.trim().to_string()))
}
/// Replaces every occurrence of the second argument with the third, in
/// the first (string) argument.
pub fn replace(args: &[Value]) -> Result<Value, EvalError> {
    let (s, from, to) = match (args.first(), args.get(1), args.get(2)) {
        (Some(Value::Str(s)), Some(Value::Str(f)), Some(Value::Str(t))) => (s, f, t),
        (Some(Value::Str(_)), Some(Value::Str(_)), _) => {
            return Err(EvalError::TypeError("replace requires to string".to_string()))
        }
        (Some(Value::Str(_)), _, _) => {
            return Err(EvalError::TypeError("replace requires from string".to_string()))
        }
        _ => return Err(EvalError::TypeError("replace expects a string".to_string())),
    };
    Ok(Value::Str(s.replace(from.as_str(), to.as_str())))
}
/// True when the first (string) argument begins with the second.
pub fn starts_with(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(s)) = args.first() else {
        return Err(EvalError::TypeError("starts_with expects a string".to_string()));
    };
    let Some(Value::Str(prefix)) = args.get(1) else {
        return Err(EvalError::TypeError("starts_with requires prefix".to_string()));
    };
    Ok(Value::Bool(s.starts_with(prefix.as_str())))
}
/// True when the first (string) argument ends with the second.
pub fn ends_with(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(s)) = args.first() else {
        return Err(EvalError::TypeError("ends_with expects a string".to_string()));
    };
    let Some(Value::Str(suffix)) = args.get(1) else {
        return Err(EvalError::TypeError("ends_with requires suffix".to_string()));
    };
    Ok(Value::Bool(s.ends_with(suffix.as_str())))
}
/// Substitutes positional placeholders `{0}`, `{1}`, … in the template
/// string with the string representations of the remaining arguments.
///
/// Substitution runs sequentially per placeholder index, so a
/// placeholder-shaped string inside an earlier substituted value can
/// itself be replaced by a later argument (matches prior behavior).
pub fn format(args: &[Value]) -> Result<Value, EvalError> {
    let Some(Value::Str(template)) = args.first() else {
        return Err(EvalError::TypeError("format expects a template string".to_string()));
    };
    let mut out = template.clone();
    for (i, arg) in args[1..].iter().enumerate() {
        out = out.replace(&format!("{{{}}}", i), &arg.to_string_repr());
    }
    Ok(Value::Str(out))
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,419 @@
//! Lexer for the doot language.
use chumsky::prelude::*;
use ordered_float::OrderedFloat;
use std::fmt;
/// Token types produced by the lexer.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Token {
    // Literals
    Int(i64),
    /// Wrapped in `OrderedFloat` so the token can derive `Eq`/`Hash`.
    Float(OrderedFloat<f64>),
    Str(String),
    Bool(bool),
    // Identifiers and keywords
    Ident(String),
    // Keywords
    Let,
    Fn,
    /// NOTE(review): the lexer's keyword table maps "async" to
    /// `Ident("async")`, so this variant is never produced by the lexer
    /// itself — confirm the parser synthesizes it.
    AsyncFn,
    If,
    Else,
    Then,
    For,
    In,
    Match,
    Struct,
    Enum,
    Type,
    Import,
    As,
    Dotfile,
    Package,
    Secret,
    Hook,
    BeforeDeploy,
    AfterDeploy,
    BeforePackage,
    AfterPackage,
    Macro,
    Await,
    Return,
    When,
    // Operators
    Plus,
    Minus,
    Star,
    Slash,
    Percent,
    Eq,
    EqEq,
    NotEq,
    Lt,
    Gt,
    LtEq,
    GtEq,
    And,
    /// Logical or; in the lexer's operator table `||` maps here.
    Or,
    /// NOTE(review): `!` lexes to `Bang`, so `Not` appears unreachable
    /// from the lexer — confirm whether it is produced elsewhere.
    Not,
    Pipe,
    /// NOTE(review): `||` lexes to `Or` first, so `DoublePipe` appears
    /// unreachable from the lexer — confirm intent.
    DoublePipe,
    DoubleColon,
    Arrow,
    FatArrow,
    Dot,
    DotDot,
    QuestionQuestion,
    // Delimiters
    LParen,
    RParen,
    LBracket,
    RBracket,
    LBrace,
    RBrace,
    Comma,
    Colon,
    Semicolon,
    /// Newlines are preserved so indentation can be reconstructed.
    Newline,
    // Special
    Tilde,
    At,
    Hash,
    Bang,
    /// Synthetic token: the line's indentation increased to this width.
    Indent(usize),
    /// Synthetic token: one indentation level was closed.
    Dedent,
}
impl fmt::Display for Token {
    // Renders the token roughly as it appears in source; used for
    // diagnostics / expected-token messages.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Token::Int(n) => write!(f, "{}", n),
            Token::Float(n) => write!(f, "{}", n),
            Token::Str(s) => write!(f, "\"{}\"", s),
            Token::Bool(b) => write!(f, "{}", b),
            Token::Ident(s) => write!(f, "{}", s),
            Token::Let => write!(f, "let"),
            Token::Fn => write!(f, "fn"),
            Token::AsyncFn => write!(f, "async fn"),
            Token::If => write!(f, "if"),
            Token::Else => write!(f, "else"),
            Token::Then => write!(f, "then"),
            Token::For => write!(f, "for"),
            Token::In => write!(f, "in"),
            Token::Match => write!(f, "match"),
            Token::Struct => write!(f, "struct"),
            Token::Enum => write!(f, "enum"),
            Token::Type => write!(f, "type"),
            Token::Import => write!(f, "import"),
            Token::As => write!(f, "as"),
            Token::Dotfile => write!(f, "dotfile"),
            Token::Package => write!(f, "package"),
            Token::Secret => write!(f, "secret"),
            Token::Hook => write!(f, "hook"),
            Token::BeforeDeploy => write!(f, "before_deploy"),
            Token::AfterDeploy => write!(f, "after_deploy"),
            Token::BeforePackage => write!(f, "before_package"),
            Token::AfterPackage => write!(f, "after_package"),
            Token::Macro => write!(f, "macro"),
            Token::Await => write!(f, "await"),
            Token::Return => write!(f, "return"),
            Token::When => write!(f, "when"),
            Token::Plus => write!(f, "+"),
            Token::Minus => write!(f, "-"),
            Token::Star => write!(f, "*"),
            Token::Slash => write!(f, "/"),
            Token::Percent => write!(f, "%"),
            Token::Eq => write!(f, "="),
            Token::EqEq => write!(f, "=="),
            Token::NotEq => write!(f, "!="),
            Token::Lt => write!(f, "<"),
            Token::Gt => write!(f, ">"),
            Token::LtEq => write!(f, "<="),
            Token::GtEq => write!(f, ">="),
            Token::And => write!(f, "&&"),
            // Or and DoublePipe intentionally render identically.
            Token::Or => write!(f, "||"),
            Token::Not => write!(f, "!"),
            Token::Pipe => write!(f, "|"),
            Token::DoublePipe => write!(f, "||"),
            Token::DoubleColon => write!(f, "::"),
            Token::Arrow => write!(f, "->"),
            Token::FatArrow => write!(f, "=>"),
            Token::Dot => write!(f, "."),
            Token::DotDot => write!(f, ".."),
            Token::QuestionQuestion => write!(f, "??"),
            Token::LParen => write!(f, "("),
            Token::RParen => write!(f, ")"),
            Token::LBracket => write!(f, "["),
            Token::RBracket => write!(f, "]"),
            // `{{` / `}}` are format-string escapes: these print `{` / `}`.
            Token::LBrace => write!(f, "{{"),
            Token::RBrace => write!(f, "}}"),
            Token::Comma => write!(f, ","),
            Token::Colon => write!(f, ":"),
            Token::Semicolon => write!(f, ";"),
            Token::Newline => write!(f, "\\n"),
            Token::Tilde => write!(f, "~"),
            Token::At => write!(f, "@"),
            Token::Hash => write!(f, "#"),
            Token::Bang => write!(f, "!"),
            Token::Indent(n) => write!(f, "<indent {}>", n),
            Token::Dedent => write!(f, "<dedent>"),
        }
    }
}
/// Source location range (byte offsets into the input).
pub type Span = std::ops::Range<usize>;
/// Token with source location.
#[derive(Clone, Debug)]
pub struct Spanned<T> {
    /// The wrapped node (token, AST node, ...).
    pub node: T,
    /// Byte range of the node in the original source text.
    pub span: Span,
}
impl<T> Spanned<T> {
    /// Creates a new spanned token.
    pub fn new(node: T, span: Span) -> Self {
        Self { node, span }
    }
}
/// Tokenizes doot source code.
pub struct Lexer;
impl Lexer {
    /// Returns the token parser combinator.
    ///
    /// `#` comments and spaces/tabs are consumed as padding around each
    /// token; newlines survive as `Token::Newline` so that
    /// [`Lexer::process_indentation`] can reconstruct indentation after
    /// parsing.
    pub fn lexer() -> impl chumsky::Parser<char, Vec<Spanned<Token>>, Error = Simple<char>> {
        // Integer literals: `0o…` octal, `0x…` hex, otherwise decimal.
        // Out-of-range / unparsable radix digits fall back to 0.
        let octal = just("0o")
            .ignore_then(text::digits(8))
            .map(|s: String| Token::Int(i64::from_str_radix(&s, 8).unwrap_or(0)));
        let hex = just("0x")
            .ignore_then(text::digits(16))
            .map(|s: String| Token::Int(i64::from_str_radix(&s, 16).unwrap_or(0)));
        let decimal = text::int(10)
            .map(|s: String| Token::Int(s.parse().unwrap()));
        let int = octal.or(hex).or(decimal);
        // Floats require digits on both sides of the dot (`1.5`), which
        // keeps a range like `1..2` lexing as Int DotDot Int.
        let float = text::int(10)
            .then(just('.').then(text::digits(10)))
            .map(|(a, (_, b)): (String, (char, String))| {
                let f: f64 = format!("{}.{}", a, b).parse().unwrap();
                Token::Float(OrderedFloat(f))
            });
        // Escape sequences recognized inside quoted strings.
        let escape = just('\\').ignore_then(
            just('\\')
                .or(just('/'))
                .or(just('"'))
                .or(just('n').to('\n'))
                .or(just('r').to('\r'))
                .or(just('t').to('\t')),
        );
        let string = just('"')
            .ignore_then(filter(|c| *c != '\\' && *c != '"').or(escape).repeated())
            .then_ignore(just('"'))
            .collect::<String>()
            .map(Token::Str);
        // Heredoc: >>>...<<< — raw multi-line string; one leading newline
        // is stripped so `>>>` may sit at the end of a line.
        let heredoc = just(">>>")
            .ignore_then(take_until(just("<<<")))
            .map(|(chars, _): (Vec<char>, _)| {
                let s: String = chars.into_iter().collect();
                // Trim leading newline if present
                let s = s.strip_prefix('\n').unwrap_or(&s);
                Token::Str(s.to_string())
            });
        // Keyword table; anything unlisted becomes an identifier.
        // NOTE(review): "async" lexes to `Ident("async")`, so
        // `Token::AsyncFn` is never emitted here — confirm the parser
        // assembles `async fn` from the two tokens.
        let keyword_or_ident = text::ident().map(|s: String| match s.as_str() {
            "let" => Token::Let,
            "fn" => Token::Fn,
            "async" => Token::Ident("async".to_string()),
            "if" => Token::If,
            "else" => Token::Else,
            "then" => Token::Then,
            "for" => Token::For,
            "in" => Token::In,
            "match" => Token::Match,
            "struct" => Token::Struct,
            "enum" => Token::Enum,
            "type" => Token::Type,
            "import" => Token::Import,
            "as" => Token::As,
            "dotfile" => Token::Dotfile,
            "package" => Token::Package,
            "secret" => Token::Secret,
            "hook" => Token::Hook,
            "before_deploy" => Token::BeforeDeploy,
            "after_deploy" => Token::AfterDeploy,
            "before_package" => Token::BeforePackage,
            "after_package" => Token::AfterPackage,
            "macro" => Token::Macro,
            "await" => Token::Await,
            "return" => Token::Return,
            "when" => Token::When,
            "true" => Token::Bool(true),
            "false" => Token::Bool(false),
            _ => Token::Ident(s),
        });
        // Multi-character operators are listed first so `==` is not read
        // as two `=`. NOTE(review): `||` maps to `Or` here, so
        // `Token::DoublePipe` (and `Token::Not`, shadowed by `Bang`)
        // appear unreachable from this table — confirm intent.
        let op = choice((
            just("??").to(Token::QuestionQuestion),
            just("=>").to(Token::FatArrow),
            just("->").to(Token::Arrow),
            just("::").to(Token::DoubleColon),
            just("..").to(Token::DotDot),
            just("==").to(Token::EqEq),
            just("!=").to(Token::NotEq),
            just("<=").to(Token::LtEq),
            just(">=").to(Token::GtEq),
            just("&&").to(Token::And),
            just("||").to(Token::Or),
            just('+').to(Token::Plus),
            just('-').to(Token::Minus),
            just('*').to(Token::Star),
            just('/').to(Token::Slash),
            just('%').to(Token::Percent),
            just('=').to(Token::Eq),
            just('<').to(Token::Lt),
            just('>').to(Token::Gt),
            just('!').to(Token::Bang),
            just('|').to(Token::Pipe),
            just('.').to(Token::Dot),
        ));
        let delim = choice((
            just('(').to(Token::LParen),
            just(')').to(Token::RParen),
            just('[').to(Token::LBracket),
            just(']').to(Token::RBracket),
            just('{').to(Token::LBrace),
            just('}').to(Token::RBrace),
            just(',').to(Token::Comma),
            just(':').to(Token::Colon),
            just(';').to(Token::Semicolon),
            just('~').to(Token::Tilde),
            just('@').to(Token::At),
            just('#').to(Token::Hash),
        ));
        // NOTE(review): `#` starts a comment, and comments are applied as
        // token padding below — so the `Hash` delimiter above can likely
        // never be emitted; confirm.
        let comment = just('#')
            .then(none_of("\n").repeated())
            .ignored();
        let whitespace = just(' ').or(just('\t')).repeated().at_least(1).ignored();
        let newline = just('\n').to(Token::Newline);
        // Ordering matters: float is tried before int so `1.5` is not
        // split; heredoc before ops so `>>>` is not read as `>` `>` `>`.
        let token = choice((
            float,
            int,
            heredoc,
            string,
            keyword_or_ident,
            op,
            delim,
            newline,
        ))
        .map_with_span(Spanned::new);
        token
            .padded_by(comment.repeated())
            .padded_by(whitespace.repeated())
            .repeated()
            .then_ignore(end())
    }
    /// Tokenizes the input string with indentation processing.
    pub fn lex(input: &str) -> Result<Vec<Spanned<Token>>, Vec<Simple<char>>> {
        let tokens = Self::lexer().parse(input)?;
        Ok(Self::process_indentation(tokens))
    }
    /// Converts whitespace into indent/dedent tokens.
    ///
    /// A line's indent width is computed as the byte offset of its first
    /// token minus the offset just after the preceding newline (i.e. the
    /// column of the first token). NOTE(review): this counts a tab as one
    /// column and assumes no comment precedes the first token on a line —
    /// confirm those assumptions hold for real input.
    fn process_indentation(tokens: Vec<Spanned<Token>>) -> Vec<Spanned<Token>> {
        let mut result = Vec::new();
        // Stack of active indentation widths; the outermost level is 0.
        let mut indent_stack = vec![0usize];
        let mut at_line_start = true;
        let mut line_start_pos = 0;
        for token in tokens {
            match &token.node {
                Token::Newline => {
                    result.push(token.clone());
                    at_line_start = true;
                    line_start_pos = token.span.end;
                }
                _ if at_line_start => {
                    let span_start = token.span.start;
                    let current_indent = span_start.saturating_sub(line_start_pos);
                    let last_indent = *indent_stack.last().unwrap();
                    if current_indent > last_indent {
                        indent_stack.push(current_indent);
                        result.push(Spanned::new(Token::Indent(current_indent), span_start..span_start));
                    } else {
                        // Pop every level deeper than the current column.
                        // NOTE(review): a dedent to a column matching no
                        // stack entry is accepted silently — confirm
                        // whether that should be a lex error.
                        while indent_stack.len() > 1 && current_indent < *indent_stack.last().unwrap() {
                            indent_stack.pop();
                            result.push(Spanned::new(Token::Dedent, span_start..span_start));
                        }
                    }
                    at_line_start = false;
                    result.push(token);
                }
                _ => {
                    result.push(token);
                }
            }
        }
        // Close all still-open indentation levels at end of input.
        let end = result.last().map(|t| t.span.end).unwrap_or(0);
        while indent_stack.len() > 1 {
            indent_stack.pop();
            result.push(Spanned::new(Token::Dedent, end..end));
        }
        result
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Keywords, identifiers, `=`, and integer literals all lex in order.
    #[test]
    fn test_basic_tokens() {
        let input = "let x = 42";
        let tokens = Lexer::lex(input).unwrap();
        assert!(matches!(tokens[0].node, Token::Let));
        assert!(matches!(tokens[1].node, Token::Ident(ref s) if s == "x"));
        assert!(matches!(tokens[2].node, Token::Eq));
        assert!(matches!(tokens[3].node, Token::Int(42)));
    }
    // Quoted strings lex to `Str` with quotes removed.
    #[test]
    fn test_string_literal() {
        let input = r#""hello world""#;
        let tokens = Lexer::lex(input).unwrap();
        assert!(matches!(tokens[0].node, Token::Str(ref s) if s == "hello world"));
    }
    // Multi-character operators `??` and `=>` lex as single tokens.
    #[test]
    fn test_operators() {
        let input = "a ?? b => c";
        let tokens = Lexer::lex(input).unwrap();
        assert!(matches!(tokens[1].node, Token::QuestionQuestion));
        assert!(matches!(tokens[3].node, Token::FatArrow));
    }
}

View file

@ -0,0 +1,22 @@
//! Doot language implementation.
//!
//! This crate provides the lexer, parser, type checker, and evaluator
//! for the doot configuration language.
pub mod ast;
pub mod builtins;
pub mod evaluator;
pub mod lexer;
pub mod macros;
pub mod parser;
pub mod planner;
pub mod type_checker;
pub mod types;
pub use ast::*;
pub use evaluator::Evaluator;
pub use lexer::Lexer;
pub use parser::Parser;
pub use planner::{validate_dotfile_targets, DotfileConflict, DotfileValidation, DotfileWarning};
pub use type_checker::TypeChecker;
pub use types::Type;

View file

@ -0,0 +1,226 @@
//! Macro expansion for doot.
use crate::ast::*;
use std::collections::HashMap;
/// Expands macros in the AST.
pub struct MacroExpander {
    /// Registered macro declarations, keyed by macro name.
    macros: HashMap<String, MacroDecl>,
}
impl MacroExpander {
/// Creates a new macro expander.
pub fn new() -> Self {
Self {
macros: HashMap::new(),
}
}
/// Registers a macro definition.
pub fn register(&mut self, decl: MacroDecl) {
self.macros.insert(decl.name.clone(), decl);
}
/// Expands a macro call into statements.
pub fn expand(&self, call: &MacroCall) -> Option<Vec<Spanned<Statement>>> {
let decl = self.macros.get(&call.name)?;
let mut substitutions: HashMap<String, &Expr> = HashMap::new();
for (param, arg) in decl.params.iter().zip(call.args.iter()) {
substitutions.insert(param.clone(), arg);
}
let expanded: Vec<Spanned<Statement>> = decl
.body
.iter()
.map(|stmt| Spanned::new(self.substitute_statement(&stmt.node, &substitutions), stmt.span.clone()))
.collect();
Some(expanded)
}
fn substitute_statement(
&self,
stmt: &Statement,
subs: &HashMap<String, &Expr>,
) -> Statement {
match stmt {
Statement::VarDecl(decl) => Statement::VarDecl(VarDecl {
name: decl.name.clone(),
ty: decl.ty.clone(),
value: self.substitute_expr(&decl.value, subs),
}),
Statement::Dotfile(dotfile) => Statement::Dotfile(Dotfile {
source: self.substitute_expr(&dotfile.source, subs),
target: self.substitute_expr(&dotfile.target, subs),
when: dotfile.when.as_ref().map(|e| self.substitute_expr(e, subs)),
template: dotfile.template,
permissions: dotfile.permissions.clone(),
owner: dotfile.owner.clone(),
deploy: dotfile.deploy,
link_patterns: dotfile.link_patterns.clone(),
copy_patterns: dotfile.copy_patterns.clone(),
}),
Statement::Package(pkg) => Statement::Package(Package {
default: pkg.default.as_ref().map(|e| self.substitute_expr(e, subs)),
brew: pkg.brew.as_ref().map(|s| PackageSpec {
name: self.substitute_expr(&s.name, subs),
cask: s.cask,
tap: s.tap.clone(),
}),
apt: pkg.apt.as_ref().map(|s| PackageSpec {
name: self.substitute_expr(&s.name, subs),
cask: s.cask,
tap: s.tap.clone(),
}),
pacman: pkg.pacman.as_ref().map(|s| PackageSpec {
name: self.substitute_expr(&s.name, subs),
cask: s.cask,
tap: s.tap.clone(),
}),
yay: pkg.yay.as_ref().map(|s| PackageSpec {
name: self.substitute_expr(&s.name, subs),
cask: s.cask,
tap: s.tap.clone(),
}),
when: pkg.when.as_ref().map(|e| self.substitute_expr(e, subs)),
}),
Statement::ForLoop(for_loop) => Statement::ForLoop(ForLoop {
var: for_loop.var.clone(),
iter: self.substitute_expr(&for_loop.iter, subs),
body: for_loop
.body
.iter()
.map(|s| {
Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone())
})
.collect(),
}),
Statement::If(if_stmt) => Statement::If(IfStatement {
condition: self.substitute_expr(&if_stmt.condition, subs),
then_body: if_stmt
.then_body
.iter()
.map(|s| {
Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone())
})
.collect(),
else_body: if_stmt.else_body.as_ref().map(|body| {
body.iter()
.map(|s| {
Spanned::new(self.substitute_statement(&s.node, subs), s.span.clone())
})
.collect()
}),
}),
Statement::Expr(expr) => Statement::Expr(self.substitute_expr(expr, subs)),
other => other.clone(),
}
}
fn substitute_expr(&self, expr: &Expr, subs: &HashMap<String, &Expr>) -> Expr {
match expr {
Expr::Ident(name) => {
if let Some(&replacement) = subs.get(name) {
replacement.clone()
} else {
expr.clone()
}
}
Expr::Binary(left, op, right) => Expr::Binary(
Box::new(self.substitute_expr(left, subs)),
op.clone(),
Box::new(self.substitute_expr(right, subs)),
),
Expr::Unary(op, inner) => {
Expr::Unary(op.clone(), Box::new(self.substitute_expr(inner, subs)))
}
Expr::Call(callee, args) => Expr::Call(
Box::new(self.substitute_expr(callee, subs)),
args.iter().map(|a| self.substitute_expr(a, subs)).collect(),
),
Expr::MethodCall(obj, method, args) => Expr::MethodCall(
Box::new(self.substitute_expr(obj, subs)),
method.clone(),
args.iter().map(|a| self.substitute_expr(a, subs)).collect(),
),
Expr::Field(obj, field) => {
Expr::Field(Box::new(self.substitute_expr(obj, subs)), field.clone())
}
Expr::Index(obj, idx) => Expr::Index(
Box::new(self.substitute_expr(obj, subs)),
Box::new(self.substitute_expr(idx, subs)),
),
Expr::List(items) => {
Expr::List(items.iter().map(|i| self.substitute_expr(i, subs)).collect())
}
Expr::StructInit(name, fields) => Expr::StructInit(
name.clone(),
fields
.iter()
.map(|(k, v)| (k.clone(), self.substitute_expr(v, subs)))
.collect(),
),
Expr::If(cond, then_expr, else_expr) => Expr::If(
Box::new(self.substitute_expr(cond, subs)),
Box::new(self.substitute_expr(then_expr, subs)),
else_expr
.as_ref()
.map(|e| Box::new(self.substitute_expr(e, subs))),
),
Expr::Lambda(params, body) => Expr::Lambda(
params.clone(),
Box::new(self.substitute_expr(body, subs)),
),
Expr::Await(inner) => {
Expr::Await(Box::new(self.substitute_expr(inner, subs)))
}
Expr::Path(left, right) => Expr::Path(
Box::new(self.substitute_expr(left, subs)),
Box::new(self.substitute_expr(right, subs)),
),
Expr::HomePath(path) => {
Expr::HomePath(Box::new(self.substitute_expr(path, subs)))
}
Expr::Interpolated(parts) => Expr::Interpolated(
parts
.iter()
.map(|p| match p {
InterpolatedPart::Literal(s) => InterpolatedPart::Literal(s.clone()),
InterpolatedPart::Expr(e) => {
InterpolatedPart::Expr(self.substitute_expr(e, subs))
}
})
.collect(),
),
other => other.clone(),
}
}
}
impl Default for MacroExpander {
fn default() -> Self {
Self::new()
}
}

View file

@ -0,0 +1,945 @@
//! Parser for the doot language.
use crate::ast::*;
use crate::lexer::Token;
use chumsky::prelude::*;
use chumsky::Parser as _;
use std::collections::HashMap;
/// Parses tokens into an AST.
pub struct Parser;
// A token paired with its source span, as produced by the lexer.
type ParserInput = crate::lexer::Spanned<Token>;
impl Parser {
    /// Parses a token stream into a program AST.
    pub fn parse(tokens: Vec<ParserInput>) -> Result<Program, Vec<Simple<Token>>> {
        let stream = tokens
            .into_iter()
            .map(|t| (t.node, t.span))
            .collect::<Vec<_>>();
        // End-of-input span starts one past the last token (0..1 when empty).
        let len = stream.last().map(|(_, s)| s.end).unwrap_or(0);
        let stream = chumsky::Stream::from_iter(len..len + 1, stream.into_iter());
        Self::program_parser().parse(stream)
    }
    /// Parses zero or more statements followed by end of input.
    fn program_parser() -> impl chumsky::Parser<Token, Program, Error = Simple<Token>> {
        Self::statement_parser()
            .repeated()
            .map(|statements| Program { statements })
            .then_ignore(end())
    }
    /// Parses a single spanned statement.
    ///
    /// Alternatives are tried in order, so the more specific declaration and
    /// block forms must come before the catch-all expression statement.
    fn statement_parser() -> impl chumsky::Parser<Token, Spanned<Statement>, Error = Simple<Token>>
    {
        recursive(|stmt| {
            // Layout tokens that may appear before/after a statement.
            // NOTE(review): stray Dedent tokens are swallowed here; block
            // parsers below also consume a trailing Dedent with `.or_not()` —
            // confirm this cannot mis-attach statements across nesting levels.
            let whitespace = choice((
                just(Token::Newline),
                just(Token::Dedent),
            )).repeated();
            let var_decl = Self::var_decl_parser().map(Statement::VarDecl);
            let fn_decl = Self::fn_decl_parser(stmt.clone()).map(Statement::FnDecl);
            let struct_decl = Self::struct_decl_parser(stmt.clone()).map(Statement::StructDecl);
            let enum_decl = Self::enum_decl_parser().map(Statement::EnumDecl);
            let type_alias = Self::type_alias_parser().map(Statement::TypeAlias);
            let import = Self::import_parser().map(Statement::Import);
            let dotfile = Self::dotfile_parser().map(Statement::Dotfile);
            let package = Self::package_parser().map(Statement::Package);
            let secret = Self::secret_parser().map(Statement::Secret);
            let hook = Self::hook_parser().map(Statement::Hook);
            let simple_hook = Self::simple_hook_parser().map(Statement::Hook);
            let macro_decl = Self::macro_decl_parser(stmt.clone()).map(Statement::MacroDecl);
            let macro_call = Self::macro_call_parser().map(Statement::MacroCall);
            let for_loop = Self::for_loop_parser(stmt.clone()).map(Statement::ForLoop);
            let if_stmt = Self::if_parser(stmt.clone()).map(Statement::If);
            let match_stmt = Self::match_parser().map(Statement::Match);
            let return_stmt = just(Token::Return)
                .ignore_then(Self::expr_parser().or_not())
                .map(Statement::Return);
            let expr_stmt = Self::expr_parser().map(Statement::Expr);
            choice((
                fn_decl,
                struct_decl,
                enum_decl,
                type_alias,
                import,
                dotfile,
                package,
                secret,
                hook,
                simple_hook,
                macro_decl,
                macro_call,
                for_loop,
                if_stmt,
                match_stmt,
                return_stmt,
                var_decl,
                expr_stmt,
            ))
            .map_with_span(Spanned::new)
            .padded_by(whitespace)
        })
    }
    /// Parses `name[: type] = expr`.
    fn var_decl_parser() -> impl chumsky::Parser<Token, VarDecl, Error = Simple<Token>> {
        Self::ident_parser()
            .then(
                just(Token::Colon)
                    .ignore_then(Self::type_annotation_parser())
                    .or_not(),
            )
            .then_ignore(just(Token::Eq))
            .then(Self::expr_parser())
            .map(|((name, ty), value)| VarDecl { name, ty, value })
    }
    /// Parses `[async] fn name(params) [-> type]:` followed by a block.
    fn fn_decl_parser(
        stmt: impl chumsky::Parser<Token, Spanned<Statement>, Error = Simple<Token>> + Clone,
    ) -> impl chumsky::Parser<Token, FnDecl, Error = Simple<Token>> {
        // `async` is a contextual keyword here: a plain identifier, not a
        // dedicated token.
        let is_async = select! { Token::Ident(s) if s == "async" => true }
            .or_not()
            .map(|a| a.is_some());
        is_async
            .then_ignore(just(Token::Fn))
            .then(Self::ident_parser())
            .then(Self::fn_params_parser())
            .then(
                just(Token::Arrow)
                    .ignore_then(Self::type_annotation_parser())
                    .or_not(),
            )
            .then_ignore(just(Token::Colon))
            .then(Self::block_parser(stmt))
            .map(|((((is_async, name), params), return_type), body)| FnDecl {
                name,
                is_async,
                params,
                return_type,
                body,
            })
    }
    /// Parses a parenthesized, comma-separated parameter list; each
    /// parameter is `name: type [= default]`.
    fn fn_params_parser() -> impl chumsky::Parser<Token, Vec<FnParam>, Error = Simple<Token>> {
        let param = Self::ident_parser()
            .then_ignore(just(Token::Colon))
            .then(Self::type_annotation_parser())
            .then(
                just(Token::Eq)
                    .ignore_then(Self::expr_parser())
                    .or_not(),
            )
            .map(|((name, ty), default)| FnParam { name, ty, default });
        param
            .separated_by(just(Token::Comma))
            .allow_trailing()
            .delimited_by(just(Token::LParen), just(Token::RParen))
    }
    /// Parses `struct Name:` with an indented body of fields and methods,
    /// which may be freely interleaved.
    fn struct_decl_parser(
        stmt: impl chumsky::Parser<Token, Spanned<Statement>, Error = Simple<Token>> + Clone,
    ) -> impl chumsky::Parser<Token, StructDecl, Error = Simple<Token>> {
        let field = Self::ident_parser()
            .then_ignore(just(Token::Colon))
            .then(Self::type_annotation_parser())
            .then(
                just(Token::Eq)
                    .ignore_then(Self::expr_parser())
                    .or_not(),
            )
            .map(|((name, ty), default)| StructField { name, ty, default });
        let method = Self::fn_decl_parser(stmt);
        just(Token::Struct)
            .ignore_then(Self::ident_parser())
            .then_ignore(just(Token::Colon))
            .then_ignore(just(Token::Newline).repeated())
            // Peek (rewind) at an Indent without consuming it, so the first
            // member still sees the layout token if it needs to.
            .then_ignore(just(Token::Indent(0)).rewind().or_not())
            .then(
                choice((
                    field.map(Either::Left),
                    method.map(Either::Right),
                ))
                .padded_by(just(Token::Newline).repeated())
                .repeated(),
            )
            .then_ignore(just(Token::Dedent).or_not())
            .map(|(name, members)| {
                // Split the interleaved member list back into fields/methods.
                let mut fields = Vec::new();
                let mut methods = Vec::new();
                for m in members {
                    match m {
                        Either::Left(f) => fields.push(f),
                        Either::Right(m) => methods.push(m),
                    }
                }
                StructDecl { name, fields, methods }
            })
    }
    /// Parses `enum Name:` with at least one indented variant; a variant may
    /// carry a parenthesized list of payload types.
    fn enum_decl_parser() -> impl chumsky::Parser<Token, EnumDecl, Error = Simple<Token>> {
        let variant = Self::ident_parser()
            .then(
                Self::type_annotation_parser()
                    .separated_by(just(Token::Comma))
                    .allow_trailing()
                    .delimited_by(just(Token::LParen), just(Token::RParen))
                    .or_not(),
            )
            .map(|(name, fields)| EnumVariant { name, fields });
        just(Token::Enum)
            .ignore_then(Self::ident_parser())
            .then_ignore(just(Token::Colon))
            .then_ignore(just(Token::Newline).repeated())
            .then(
                variant
                    .padded_by(just(Token::Newline).repeated())
                    .repeated()
                    .at_least(1),
            )
            .then_ignore(just(Token::Dedent).or_not())
            .map(|(name, variants)| EnumDecl { name, variants })
    }
    /// Parses `type Name = Type`.
    fn type_alias_parser() -> impl chumsky::Parser<Token, TypeAlias, Error = Simple<Token>> {
        just(Token::Type)
            .ignore_then(Self::ident_parser())
            .then_ignore(just(Token::Eq))
            .then(Self::type_annotation_parser())
            .map(|(name, ty)| TypeAlias { name, ty })
    }
    /// Parses `import "path" [as alias]`.
    fn import_parser() -> impl chumsky::Parser<Token, Import, Error = Simple<Token>> {
        just(Token::Import)
            .ignore_then(select! { Token::Str(s) => s })
            .then(just(Token::As).ignore_then(Self::ident_parser()).or_not())
            .map(|(path, alias)| Import { path, alias })
    }
    /// Optionally consumes a single Indent token (any indent width).
    fn indent_parser() -> impl chumsky::Parser<Token, (), Error = Simple<Token>> + Clone {
        select! { Token::Indent(_) => () }.or_not().ignored()
    }
    /// Parses a field name: any identifier, or the `when` keyword (which the
    /// lexer tokenizes specially but is a valid field name in blocks).
    fn field_name_parser() -> impl chumsky::Parser<Token, String, Error = Simple<Token>> + Clone {
        Self::ident_parser().or(just(Token::When).to("when".to_string()))
    }
    /// Parses a `dotfile:` block of `name = expr` fields into a [`Dotfile`].
    ///
    /// Unknown field names are silently ignored, as are known fields whose
    /// value has an unexpected shape (e.g. a non-bool `template`).
    fn dotfile_parser() -> impl chumsky::Parser<Token, Dotfile, Error = Simple<Token>> {
        let field = Self::field_name_parser()
            .then_ignore(just(Token::Eq))
            .then(Self::expr_parser());
        just(Token::Dotfile)
            .ignore_then(just(Token::Colon))
            .ignore_then(just(Token::Newline).repeated())
            .ignore_then(Self::indent_parser())
            .ignore_then(
                field
                    .padded_by(Self::indent_parser())
                    .padded_by(just(Token::Newline).repeated())
                    .repeated()
                    .at_least(1),
            )
            .then_ignore(just(Token::Dedent).or_not())
            .map(|fields| {
                // Start from an all-defaults Dotfile and fill in whatever
                // fields the block provided, last occurrence winning.
                let mut dotfile = Dotfile {
                    source: Expr::Literal(Literal::None),
                    target: Expr::Literal(Literal::None),
                    when: None,
                    template: None,
                    permissions: Vec::new(),
                    owner: None,
                    deploy: DeployMode::default(),
                    link_patterns: Vec::new(),
                    copy_patterns: Vec::new(),
                };
                for (name, value) in fields {
                    match name.as_str() {
                        "source" => dotfile.source = value,
                        "target" => dotfile.target = value,
                        "when" => dotfile.when = Some(value),
                        "template" => {
                            if let Expr::Literal(Literal::Bool(b)) = value {
                                dotfile.template = Some(b);
                            }
                        }
                        "permissions" => {
                            dotfile.permissions = expr_to_permission_rules(&value);
                        }
                        "deploy" => {
                            // Any string other than "link" means copy.
                            if let Expr::Literal(Literal::Str(s)) = value {
                                dotfile.deploy = match s.as_str() {
                                    "link" => DeployMode::Link,
                                    _ => DeployMode::Copy,
                                };
                            }
                        }
                        "link" => {
                            dotfile.link_patterns = expr_to_string_list(&value);
                        }
                        "copy" => {
                            dotfile.copy_patterns = expr_to_string_list(&value);
                        }
                        "owner" => {
                            if let Expr::Literal(Literal::Str(s)) = value {
                                dotfile.owner = Some(s);
                            }
                        }
                        _ => {}
                    }
                }
                dotfile
            })
    }
    /// Parses a package declaration, either inline (`package: expr`) or as
    /// a block of per-manager fields (`brew = ...`, `apt = ...`, ...).
    fn package_parser() -> impl chumsky::Parser<Token, Package, Error = Simple<Token>> {
        let inline = just(Token::Package)
            .ignore_then(just(Token::Colon))
            .ignore_then(Self::expr_parser())
            .map(|name| Package {
                default: Some(name),
                brew: None,
                apt: None,
                pacman: None,
                yay: None,
                when: None,
            });
        let field = Self::field_name_parser()
            .then_ignore(just(Token::Eq))
            .then(Self::expr_parser());
        let block = just(Token::Package)
            .ignore_then(just(Token::Colon))
            .ignore_then(just(Token::Newline).repeated())
            .ignore_then(Self::indent_parser())
            .ignore_then(
                field
                    .padded_by(Self::indent_parser())
                    .padded_by(just(Token::Newline).repeated())
                    .repeated()
                    .at_least(1),
            )
            .then_ignore(just(Token::Dedent).or_not())
            .map(|fields| {
                let mut pkg = Package {
                    default: None,
                    brew: None,
                    apt: None,
                    pacman: None,
                    yay: None,
                    when: None,
                };
                // Per-manager fields only set the package name; cask/tap are
                // not expressible in this block form.
                for (name, value) in fields {
                    match name.as_str() {
                        "default" => pkg.default = Some(value),
                        "brew" => pkg.brew = Some(PackageSpec { name: value, cask: None, tap: None }),
                        "apt" => pkg.apt = Some(PackageSpec { name: value, cask: None, tap: None }),
                        "pacman" => pkg.pacman = Some(PackageSpec { name: value, cask: None, tap: None }),
                        "yay" => pkg.yay = Some(PackageSpec { name: value, cask: None, tap: None }),
                        "when" => pkg.when = Some(value),
                        _ => {}
                    }
                }
                pkg
            });
        inline.or(block)
    }
    /// Parses a `secret:` block with `source`, `target`, and optional
    /// integer `mode` fields.
    fn secret_parser() -> impl chumsky::Parser<Token, Secret, Error = Simple<Token>> {
        let field = Self::field_name_parser()
            .then_ignore(just(Token::Eq))
            .then(Self::expr_parser());
        just(Token::Secret)
            .ignore_then(just(Token::Colon))
            .ignore_then(just(Token::Newline).repeated())
            .ignore_then(Self::indent_parser())
            .ignore_then(
                field
                    .padded_by(Self::indent_parser())
                    .padded_by(just(Token::Newline).repeated())
                    .repeated()
                    .at_least(1),
            )
            .then_ignore(just(Token::Dedent).or_not())
            .map(|fields| {
                let mut secret = Secret {
                    source: Expr::Literal(Literal::None),
                    target: Expr::Literal(Literal::None),
                    mode: None,
                };
                for (name, value) in fields {
                    match name.as_str() {
                        "source" => secret.source = value,
                        "target" => secret.target = value,
                        "mode" => {
                            if let Expr::Literal(Literal::Int(m)) = value {
                                secret.mode = Some(m as u32);
                            }
                        }
                        _ => {}
                    }
                }
                secret
            })
    }
    /// Parses a `hook:` block with `stage`, `run`, and `when` fields.
    fn hook_parser() -> impl chumsky::Parser<Token, Hook, Error = Simple<Token>> {
        // Any unrecognized stage name silently falls back to AfterDeploy.
        let stage = Self::ident_parser().map(|s| match s.as_str() {
            "BeforeDeploy" => HookStage::BeforeDeploy,
            "AfterDeploy" => HookStage::AfterDeploy,
            "BeforePackage" => HookStage::BeforePackage,
            "AfterPackage" => HookStage::AfterPackage,
            _ => HookStage::AfterDeploy,
        });
        // NOTE(review): the stage alternative is tried first and matches ANY
        // bare identifier, so `run = some_ident` parses as Either::Left and is
        // then dropped by the `_ => {}` arm below. Only non-identifier values
        // (e.g. string literals) reach the expression branch — confirm this is
        // intended.
        let field = Self::field_name_parser()
            .then_ignore(just(Token::Eq))
            .then(choice((
                stage.map(Either::Left),
                Self::expr_parser().map(Either::Right),
            )));
        just(Token::Hook)
            .ignore_then(just(Token::Colon))
            .ignore_then(just(Token::Newline).repeated())
            .ignore_then(Self::indent_parser())
            .ignore_then(
                field
                    .padded_by(Self::indent_parser())
                    .padded_by(just(Token::Newline).repeated())
                    .repeated()
                    .at_least(1),
            )
            .then_ignore(just(Token::Dedent).or_not())
            .map(|fields| {
                let mut hook = Hook {
                    stage: HookStage::AfterDeploy,
                    run: Expr::Literal(Literal::None),
                    when: None,
                };
                for (name, value) in fields {
                    match (name.as_str(), value) {
                        ("stage", Either::Left(s)) => hook.stage = s,
                        ("run", Either::Right(e)) => hook.run = e,
                        ("when", Either::Right(e)) => hook.when = Some(e),
                        _ => {}
                    }
                }
                hook
            })
    }
    /// Parses the shorthand hook form `<stage-keyword>: expr`.
    fn simple_hook_parser() -> impl chumsky::Parser<Token, Hook, Error = Simple<Token>> {
        let stage_token = choice((
            just(Token::BeforeDeploy).to(HookStage::BeforeDeploy),
            just(Token::AfterDeploy).to(HookStage::AfterDeploy),
            just(Token::BeforePackage).to(HookStage::BeforePackage),
            just(Token::AfterPackage).to(HookStage::AfterPackage),
        ));
        stage_token
            .then_ignore(just(Token::Colon))
            .then(Self::expr_parser())
            .map(|(stage, run)| Hook {
                stage,
                run,
                when: None,
            })
    }
    /// Parses `macro name!(params):` followed by a block.
    fn macro_decl_parser(
        stmt: impl chumsky::Parser<Token, Spanned<Statement>, Error = Simple<Token>> + Clone,
    ) -> impl chumsky::Parser<Token, MacroDecl, Error = Simple<Token>> {
        just(Token::Macro)
            .ignore_then(Self::ident_parser())
            .then_ignore(just(Token::Bang))
            .then(
                Self::ident_parser()
                    .separated_by(just(Token::Comma))
                    .allow_trailing()
                    .delimited_by(just(Token::LParen), just(Token::RParen)),
            )
            .then_ignore(just(Token::Colon))
            .then(Self::block_parser(stmt))
            .map(|((name, params), body)| MacroDecl { name, params, body })
    }
    /// Parses `name!(args)`.
    fn macro_call_parser() -> impl chumsky::Parser<Token, MacroCall, Error = Simple<Token>> {
        Self::ident_parser()
            .then_ignore(just(Token::Bang))
            .then(
                Self::expr_parser()
                    .separated_by(just(Token::Comma))
                    .allow_trailing()
                    .delimited_by(just(Token::LParen), just(Token::RParen)),
            )
            .map(|(name, args)| MacroCall { name, args })
    }
    /// Parses `for var in expr:` followed by a block.
    fn for_loop_parser(
        stmt: impl chumsky::Parser<Token, Spanned<Statement>, Error = Simple<Token>> + Clone,
    ) -> impl chumsky::Parser<Token, ForLoop, Error = Simple<Token>> {
        just(Token::For)
            .ignore_then(Self::ident_parser())
            .then_ignore(just(Token::In))
            .then(Self::expr_parser())
            .then_ignore(just(Token::Colon))
            .then(Self::block_parser(stmt))
            .map(|((var, iter), body)| ForLoop { var, iter, body })
    }
    /// Parses `if expr:` with a block and an optional `else:` block.
    fn if_parser(
        stmt: impl chumsky::Parser<Token, Spanned<Statement>, Error = Simple<Token>> + Clone,
    ) -> impl chumsky::Parser<Token, IfStatement, Error = Simple<Token>> {
        just(Token::If)
            .ignore_then(Self::expr_parser())
            .then_ignore(just(Token::Colon))
            .then(Self::block_parser(stmt.clone()))
            .then(
                just(Token::Else)
                    .ignore_then(just(Token::Colon))
                    .ignore_then(Self::block_parser(stmt))
                    .or_not(),
            )
            .map(|((condition, then_body), else_body)| IfStatement {
                condition,
                then_body,
                else_body,
            })
    }
    /// Parses `name = match expr:` with one or more `pattern => expr` arms.
    ///
    /// NOTE(review): the bound name on the left of `=` is parsed but
    /// discarded (`MatchStatement` carries only the scrutinee and arms) —
    /// confirm downstream code does not expect the binding.
    fn match_parser() -> impl chumsky::Parser<Token, MatchStatement, Error = Simple<Token>> {
        // Pattern alternatives, most specific first: literals, Enum::Variant,
        // the `_` wildcard, then a plain identifier binding.
        let pattern = choice((
            select! {
                Token::Int(n) => Pattern::Literal(Literal::Int(n)),
                Token::Float(n) => Pattern::Literal(Literal::Float(n.into_inner())),
                Token::Str(s) => Pattern::Literal(Literal::Str(s)),
                Token::Bool(b) => Pattern::Literal(Literal::Bool(b)),
            },
            Self::ident_parser()
                .then_ignore(just(Token::DoubleColon))
                .then(Self::ident_parser())
                .map(|(ty, variant)| Pattern::EnumVariant { ty, variant }),
            select! { Token::Ident(s) if s == "_" => Pattern::Wildcard },
            Self::ident_parser().map(Pattern::Ident),
        ));
        let arm = pattern
            .then_ignore(just(Token::FatArrow))
            .then(Self::expr_parser())
            .map(|(pattern, body)| MatchArm { pattern, body });
        Self::ident_parser()
            .then_ignore(just(Token::Eq))
            .then_ignore(just(Token::Match))
            .then(Self::expr_parser())
            .then_ignore(just(Token::Colon))
            .then_ignore(just(Token::Newline).repeated())
            .then(
                arm.padded_by(just(Token::Newline).repeated())
                    .repeated()
                    .at_least(1),
            )
            .then_ignore(just(Token::Dedent).or_not())
            .map(|((_, expr), arms)| MatchStatement { expr, arms })
    }
    /// Parses an indented block: Newline(s), an Indent, one or more
    /// statements, and an optional trailing Dedent.
    fn block_parser(
        stmt: impl chumsky::Parser<Token, Spanned<Statement>, Error = Simple<Token>> + Clone,
    ) -> impl chumsky::Parser<Token, Vec<Spanned<Statement>>, Error = Simple<Token>> {
        just(Token::Newline)
            .repeated()
            .ignore_then(filter(|t| matches!(t, Token::Indent(_))))
            .ignore_then(stmt.repeated().at_least(1))
            .then_ignore(just(Token::Dedent).or_not())
    }
    /// Parses a type annotation: simple names, `[T]` lists, string-literal
    /// types, `T?` optionals, and `A | B` unions.
    fn type_annotation_parser(
    ) -> impl chumsky::Parser<Token, TypeAnnotation, Error = Simple<Token>> {
        recursive(|ty| {
            let simple = Self::ident_parser().map(TypeAnnotation::Simple);
            let list = ty
                .clone()
                .delimited_by(just(Token::LBracket), just(Token::RBracket))
                .map(|t| TypeAnnotation::List(Box::new(t)));
            let literal_str = select! { Token::Str(s) => TypeAnnotation::Literal(Literal::Str(s)) };
            let base = choice((list, literal_str, simple));
            // NOTE(review): the optional marker is matched as Ident("?") —
            // verify the lexer actually emits `?` as an identifier token.
            let optional = base
                .clone()
                .then(select! { Token::Ident(s) if s == "?" => () }.or_not())
                .map(|(t, opt)| {
                    if opt.is_some() {
                        TypeAnnotation::Optional(Box::new(t))
                    } else {
                        t
                    }
                });
            optional
                .clone()
                .then(
                    just(Token::Pipe)
                        .ignore_then(optional.clone())
                        .repeated(),
                )
                .map(|(first, rest)| {
                    // Collapse a lone type; build a Union only when `|` occurs.
                    if rest.is_empty() {
                        first
                    } else {
                        let mut types = vec![first];
                        types.extend(rest);
                        TypeAnnotation::Union(types)
                    }
                })
        })
    }
    /// Parses an expression with full operator precedence.
    ///
    /// Precedence (loosest to tightest): `??`, `or`, `and`, comparisons,
    /// `+`/`-`, `*`/`%`, `/` (path join), unary `-`/`!`, then postfix
    /// call/method/field/index on an atom.
    fn expr_parser() -> impl chumsky::Parser<Token, Expr, Error = Simple<Token>> {
        recursive(|expr| {
            let literal = select! {
                Token::Int(n) => Expr::Literal(Literal::Int(n)),
                Token::Float(n) => Expr::Literal(Literal::Float(n.into_inner())),
                // Strings containing both `{` and `}` are treated as
                // interpolated; see parse_interpolated for the hole syntax.
                Token::Str(s) => {
                    if s.contains('{') && s.contains('}') {
                        Self::parse_interpolated(&s)
                    } else {
                        Expr::Literal(Literal::Str(s))
                    }
                },
                Token::Bool(b) => Expr::Literal(Literal::Bool(b)),
            };
            let ident = Self::ident_parser().map(Expr::Ident);
            let list = expr
                .clone()
                .separated_by(just(Token::Comma))
                .allow_trailing()
                .delimited_by(just(Token::LBracket), just(Token::RBracket))
                .map(Expr::List);
            // `Name { field = expr, ... }` — duplicate fields collapse via
            // the HashMap (last one wins).
            let struct_init = Self::ident_parser()
                .then(
                    Self::ident_parser()
                        .then_ignore(just(Token::Eq))
                        .then(expr.clone())
                        .separated_by(just(Token::Comma))
                        .allow_trailing()
                        .delimited_by(just(Token::LBrace), just(Token::RBrace)),
                )
                .map(|(name, fields)| {
                    let map: HashMap<_, _> = fields.into_iter().collect();
                    Expr::StructInit(name, map)
                });
            let enum_variant = Self::ident_parser()
                .then_ignore(just(Token::DoubleColon))
                .then(Self::ident_parser())
                .map(|(ty, variant)| Expr::EnumVariant(ty, variant));
            // `~` alone is the home directory; `~/rest` wraps the rest.
            let home_path = just(Token::Tilde)
                .ignore_then(just(Token::Slash).ignore_then(expr.clone()).or_not())
                .map(|path| {
                    Expr::HomePath(Box::new(
                        path.unwrap_or(Expr::Literal(Literal::Str(String::new()))),
                    ))
                });
            let paren = expr
                .clone()
                .delimited_by(just(Token::LParen), just(Token::RParen));
            // `|x, y: T| body` — untyped params default to `any`.
            let lambda = just(Token::Pipe)
                .ignore_then(
                    Self::ident_parser()
                        .then(
                            just(Token::Colon)
                                .ignore_then(Self::type_annotation_parser())
                                .or_not(),
                        )
                        .map(|(name, ty)| FnParam {
                            name,
                            ty: ty.unwrap_or(TypeAnnotation::Simple("any".to_string())),
                            default: None,
                        })
                        .separated_by(just(Token::Comma)),
                )
                .then_ignore(just(Token::Pipe))
                .then(expr.clone())
                .map(|(params, body)| Expr::Lambda(params, Box::new(body)));
            let if_expr = just(Token::If)
                .ignore_then(expr.clone())
                .then_ignore(just(Token::Then))
                .then(expr.clone())
                .then(just(Token::Else).ignore_then(expr.clone()).or_not())
                .map(|((cond, then_expr), else_expr)| {
                    Expr::If(Box::new(cond), Box::new(then_expr), else_expr.map(Box::new))
                });
            let await_expr = just(Token::Await)
                .ignore_then(expr.clone())
                .map(|e| Expr::Await(Box::new(e)));
            let atom = choice((
                await_expr,
                if_expr,
                lambda,
                home_path,
                struct_init,
                enum_variant,
                list,
                literal,
                ident,
                paren,
            ));
            // Left-fold any run of postfix operations: `f(x)`, `.m(x)`,
            // `.field`, `[i]`.
            let call_or_access = atom.then(
                choice((
                    expr.clone()
                        .separated_by(just(Token::Comma))
                        .allow_trailing()
                        .delimited_by(just(Token::LParen), just(Token::RParen))
                        .map(CallOrAccess::Call),
                    just(Token::Dot)
                        .ignore_then(Self::ident_parser())
                        .then(
                            expr.clone()
                                .separated_by(just(Token::Comma))
                                .allow_trailing()
                                .delimited_by(just(Token::LParen), just(Token::RParen))
                                .or_not(),
                        )
                        .map(|(name, args)| {
                            if let Some(args) = args {
                                CallOrAccess::MethodCall(name, args)
                            } else {
                                CallOrAccess::Field(name)
                            }
                        }),
                    expr.clone()
                        .delimited_by(just(Token::LBracket), just(Token::RBracket))
                        .map(CallOrAccess::Index),
                ))
                .repeated(),
            ).foldl(|e, access| match access {
                CallOrAccess::Call(args) => Expr::Call(Box::new(e), args),
                CallOrAccess::MethodCall(name, args) => Expr::MethodCall(Box::new(e), name, args),
                CallOrAccess::Field(name) => Expr::Field(Box::new(e), name),
                CallOrAccess::Index(idx) => Expr::Index(Box::new(e), Box::new(idx)),
            });
            // Prefix unary operators, applied innermost-first (rev fold).
            let unary_ops = choice((
                just(Token::Minus).to(UnaryOp::Neg),
                just(Token::Bang).to(UnaryOp::Not),
            ))
            .repeated()
            .collect::<Vec<_>>();
            let unary = unary_ops
                .then(call_or_access)
                .map(|(ops, expr)| {
                    ops.into_iter().rev().fold(expr, |e, op| Expr::Unary(op, Box::new(e)))
                })
                .boxed();
            // `/` builds path-join nodes, left-associative.
            let path_op = unary
                .clone()
                .then(just(Token::Slash).ignore_then(unary.clone()).repeated())
                .foldl(|a, b| Expr::Path(Box::new(a), Box::new(b)))
                .boxed();
            let product = path_op
                .clone()
                .then(
                    choice((
                        just(Token::Star).to(BinOp::Mul),
                        just(Token::Percent).to(BinOp::Mod),
                    ))
                    .then(path_op.clone())
                    .repeated(),
                )
                .foldl(|a, (op, b)| Expr::Binary(Box::new(a), op, Box::new(b)))
                .boxed();
            let sum = product
                .clone()
                .then(
                    choice((
                        just(Token::Plus).to(BinOp::Add),
                        just(Token::Minus).to(BinOp::Sub),
                    ))
                    .then(product.clone())
                    .repeated(),
                )
                .foldl(|a, (op, b)| Expr::Binary(Box::new(a), op, Box::new(b)))
                .boxed();
            let comparison = sum
                .clone()
                .then(
                    choice((
                        just(Token::EqEq).to(BinOp::Eq),
                        just(Token::NotEq).to(BinOp::NotEq),
                        just(Token::LtEq).to(BinOp::LtEq),
                        just(Token::GtEq).to(BinOp::GtEq),
                        just(Token::Lt).to(BinOp::Lt),
                        just(Token::Gt).to(BinOp::Gt),
                    ))
                    .then(sum.clone())
                    .repeated(),
                )
                .foldl(|a, (op, b)| Expr::Binary(Box::new(a), op, Box::new(b)))
                .boxed();
            let and_expr = comparison
                .clone()
                .then(just(Token::And).ignore_then(comparison.clone()).repeated())
                .foldl(|a, b| Expr::Binary(Box::new(a), BinOp::And, Box::new(b)))
                .boxed();
            let or_expr = and_expr
                .clone()
                .then(just(Token::Or).ignore_then(and_expr.clone()).repeated())
                .foldl(|a, b| Expr::Binary(Box::new(a), BinOp::Or, Box::new(b)))
                .boxed();
            // `??` (null-coalescing) binds loosest.
            or_expr
                .clone()
                .then(just(Token::QuestionQuestion).ignore_then(or_expr.clone()).repeated())
                .foldl(|a, b| Expr::Binary(Box::new(a), BinOp::NullCoalesce, Box::new(b)))
        })
    }
    /// Parses a bare identifier token into its string.
    fn ident_parser() -> impl chumsky::Parser<Token, String, Error = Simple<Token>> + Clone {
        select! { Token::Ident(s) => s }
    }
    /// Splits an interpolated string literal into literal and `{...}` parts.
    ///
    /// The text inside a hole is NOT re-parsed as an expression: it is kept
    /// verbatim as `Expr::Ident`, nested braces included. A string with no
    /// holes collapses back to a plain string literal. Note that an unclosed
    /// `{` silently drops the pending hole text.
    fn parse_interpolated(s: &str) -> Expr {
        let mut parts = Vec::new();
        let mut current = String::new();
        let mut in_expr = false;
        let mut expr_depth = 0;
        let mut expr_str = String::new();
        for c in s.chars() {
            if in_expr {
                if c == '{' {
                    // Nested opening brace: keep it in the hole text.
                    expr_depth += 1;
                    expr_str.push(c);
                } else if c == '}' {
                    if expr_depth == 0 {
                        // Top-level close: finish the hole.
                        in_expr = false;
                        parts.push(InterpolatedPart::Expr(Expr::Ident(expr_str.clone())));
                        expr_str.clear();
                    } else {
                        expr_depth -= 1;
                        expr_str.push(c);
                    }
                } else {
                    expr_str.push(c);
                }
            } else if c == '{' {
                // Flush the pending literal before starting a hole.
                if !current.is_empty() {
                    parts.push(InterpolatedPart::Literal(current.clone()));
                    current.clear();
                }
                in_expr = true;
            } else {
                current.push(c);
            }
        }
        if !current.is_empty() {
            parts.push(InterpolatedPart::Literal(current));
        }
        // Degenerate case: no holes at all — return a plain string literal.
        if parts.len() == 1 {
            if let InterpolatedPart::Literal(s) = &parts[0] {
                return Expr::Literal(Literal::Str(s.clone()));
            }
        }
        Expr::Interpolated(parts)
    }
}
// A postfix operation parsed after an atom; used to left-fold chains such
// as `a(b).c[d]` into nested Expr nodes.
enum CallOrAccess {
    // `(args...)` — plain call.
    Call(Vec<Expr>),
    // `.name(args...)` — method call.
    MethodCall(String, Vec<Expr>),
    // `.name` — field access.
    Field(String),
    // `[expr]` — indexing.
    Index(Expr),
}
// Minimal two-variant sum type used to merge heterogeneous parser
// alternatives into a single result stream (e.g. struct fields vs methods,
// hook stages vs expressions).
enum Either<L, R> {
    Left(L),
    Right(R),
}
/// Extracts string values from an expression.
///
/// A string literal yields a one-element list; a list expression yields every
/// string literal it contains (non-string elements are skipped); any other
/// expression yields an empty list.
fn expr_to_string_list(expr: &Expr) -> Vec<String> {
    match expr {
        Expr::Literal(Literal::Str(s)) => vec![s.clone()],
        Expr::List(items) => {
            let mut strings = Vec::new();
            for item in items {
                if let Expr::Literal(Literal::Str(s)) = item {
                    strings.push(s.clone());
                }
            }
            strings
        }
        _ => Vec::new(),
    }
}
/// Converts a `permissions` field expression into permission rules.
///
/// Accepted shapes (anything else is silently ignored):
/// - a single integer mode, e.g. `permissions = 0o755`;
/// - a list mixing `[pattern, mode]` pairs and bare integer modes,
///   e.g. `permissions = [["*.sh", 0o755], 0o644]`.
fn expr_to_permission_rules(expr: &Expr) -> Vec<PermissionRule> {
    // Interpret one element of a permissions list.
    let as_rule = |item: &Expr| -> Option<PermissionRule> {
        match item {
            // `[pattern, mode]` pair — must be exactly two elements.
            Expr::List(pair) => {
                if let [Expr::Literal(Literal::Str(pattern)), Expr::Literal(Literal::Int(mode))] =
                    pair.as_slice()
                {
                    Some(PermissionRule::Pattern {
                        pattern: pattern.clone(),
                        mode: *mode as u32,
                    })
                } else {
                    None
                }
            }
            // Bare mode inside the array (less common but supported).
            Expr::Literal(Literal::Int(mode)) => Some(PermissionRule::Single(*mode as u32)),
            _ => None,
        }
    };
    match expr {
        // Single mode: permissions = 0o755
        Expr::Literal(Literal::Int(mode)) => vec![PermissionRule::Single(*mode as u32)],
        // Array of rules: permissions = [["*.sh", 0o755], ["secret/*", 0o600]]
        Expr::List(items) => items.iter().filter_map(as_rule).collect(),
        _ => Vec::new(),
    }
}

View file

@ -0,0 +1,192 @@
//! Dependency graph for task ordering.
use std::collections::{HashMap, HashSet};
/// Directed acyclic graph of task dependencies.
#[derive(Debug, Clone)]
pub struct DependencyGraph {
    // Task id -> node payload.
    nodes: HashMap<String, Node>,
    // Adjacency list: task id -> ids this task has edges to (see add_edge).
    edges: HashMap<String, HashSet<String>>,
}
/// A node in the dependency graph.
#[derive(Debug, Clone)]
pub struct Node {
    // Unique task identifier; also the key in the graph's node map.
    pub id: String,
    // Broad category of the task.
    pub task_type: TaskType,
    // Category-specific payload.
    pub data: TaskData,
}
/// Task category.
#[derive(Debug, Clone)]
pub enum TaskType {
    /// A dotfile deployment task.
    Dotfile,
    /// A package installation task.
    Package,
    /// A secret deployment task.
    Secret,
    /// A hook (command) task.
    Hook,
    /// A user-defined task.
    Custom,
}
/// Task-specific data.
#[derive(Debug, Clone)]
pub enum TaskData {
    /// Dotfile deployment from `source` to `target`; `template` marks the
    /// file for template rendering (interpretation is up to the handler).
    Dotfile {
        source: std::path::PathBuf,
        target: std::path::PathBuf,
        template: bool,
    },
    /// Package `name` to be installed via the given package `manager`.
    Package {
        name: String,
        manager: String,
    },
    /// Secret deployed from `source` to `target`.
    Secret {
        source: std::path::PathBuf,
        target: std::path::PathBuf,
    },
    /// Shell command to run.
    Hook {
        command: String,
    },
    /// Opaque user-defined payload; executed as a no-op by the executor.
    Custom(String),
}
impl DependencyGraph {
    /// Creates an empty dependency graph.
    pub fn new() -> Self {
        Self {
            nodes: HashMap::new(),
            edges: HashMap::new(),
        }
    }
    /// Adds a task node, replacing any existing node with the same id.
    pub fn add_node(&mut self, id: String, task_type: TaskType, data: TaskData) {
        self.nodes.insert(
            id.clone(),
            Node {
                id: id.clone(),
                task_type,
                data,
            },
        );
        // Every node gets an (initially empty) adjacency entry so the edge
        // map covers all nodes.
        self.edges.entry(id).or_default();
    }
    /// Adds a dependency edge (from depends on to).
    pub fn add_edge(&mut self, from: &str, to: &str) {
        self.edges
            .entry(from.to_string())
            .or_default()
            .insert(to.to_string());
    }
    /// Returns tasks in dependency order using Kahn's algorithm.
    ///
    /// Nodes with no incoming edges come first; the relative order within a
    /// level is unspecified (it follows `HashMap` iteration order).
    ///
    /// # Errors
    ///
    /// Returns an error if the graph contains a cycle.
    pub fn topological_sort(&self) -> Result<Vec<String>, String> {
        // Count incoming edges per node. (The previous implementation also
        // built a "reverse_edges" map here, but it was never read — and was
        // keyed by `from`, so it was not actually reversed; it has been
        // removed as dead code.)
        let mut in_degree: HashMap<String, usize> = HashMap::new();
        for id in self.nodes.keys() {
            in_degree.insert(id.clone(), 0);
        }
        for tos in self.edges.values() {
            for to in tos {
                *in_degree.entry(to.clone()).or_default() += 1;
            }
        }
        // Seed the worklist with every node that has no incoming edge.
        let mut queue: Vec<String> = in_degree
            .iter()
            .filter(|(_, deg)| **deg == 0)
            .map(|(id, _)| id.clone())
            .collect();
        let mut result = Vec::new();
        while let Some(node) = queue.pop() {
            result.push(node.clone());
            if let Some(deps) = self.edges.get(&node) {
                for dep in deps {
                    if let Some(deg) = in_degree.get_mut(dep) {
                        *deg -= 1;
                        if *deg == 0 {
                            queue.push(dep.clone());
                        }
                    }
                }
            }
        }
        // Any node never reaching in-degree 0 sits on a cycle.
        if result.len() != self.nodes.len() {
            return Err("cycle detected in dependency graph".to_string());
        }
        Ok(result)
    }
    /// Groups tasks into parallelizable batches.
    ///
    /// All tasks in one batch have no edges among themselves and may run
    /// concurrently; batches must run in the returned order.
    ///
    /// # Errors
    ///
    /// Returns an error if the graph contains a cycle.
    pub fn get_parallel_batches(&self) -> Result<Vec<Vec<String>>, String> {
        let mut in_degree: HashMap<String, usize> = HashMap::new();
        let mut remaining = self.nodes.keys().cloned().collect::<HashSet<_>>();
        for id in self.nodes.keys() {
            in_degree.insert(id.clone(), 0);
        }
        for tos in self.edges.values() {
            for to in tos {
                *in_degree.entry(to.clone()).or_default() += 1;
            }
        }
        let mut batches = Vec::new();
        while !remaining.is_empty() {
            // Each round, every remaining node with in-degree 0 is ready.
            let batch: Vec<String> = remaining
                .iter()
                .filter(|id| in_degree.get(*id).copied().unwrap_or(0) == 0)
                .cloned()
                .collect();
            // No progress while nodes remain means a cycle.
            if batch.is_empty() {
                return Err("cycle detected in dependency graph".to_string());
            }
            for node in &batch {
                remaining.remove(node);
                if let Some(deps) = self.edges.get(node) {
                    for dep in deps {
                        if let Some(deg) = in_degree.get_mut(dep) {
                            *deg -= 1;
                        }
                    }
                }
            }
            batches.push(batch);
        }
        Ok(batches)
    }
    /// Gets a node by ID.
    pub fn get_node(&self, id: &str) -> Option<&Node> {
        self.nodes.get(id)
    }
    /// Iterates over all nodes in unspecified order.
    pub fn nodes(&self) -> impl Iterator<Item = &Node> {
        self.nodes.values()
    }
}
impl Default for DependencyGraph {
fn default() -> Self {
Self::new()
}
}

View file

@ -0,0 +1,194 @@
//! Task execution engine.
use super::dag::{DependencyGraph, Node, TaskData};
use rayon::prelude::*;
use std::sync::{Arc, Mutex};
use thiserror::Error;
/// Execution errors.
#[derive(Error, Debug)]
pub enum ExecutionError {
    /// A task reported a failure. `task_id` identifies the failing task, or
    /// is "scheduler" for graph-level failures such as dependency cycles.
    #[error("task failed: {task_id}: {message}")]
    TaskFailed { task_id: String, message: String },
    /// An underlying I/O operation failed.
    #[error("io error: {0}")]
    IoError(#[from] std::io::Error),
}
/// Result type for task execution: success carries no value.
pub type TaskResult = Result<(), ExecutionError>;
/// Handler for task types.
///
/// Implementations perform the actual work for each task category; the
/// executor only dispatches. Must be `Send + Sync` because tasks may run
/// from multiple rayon worker threads in parallel mode.
pub trait TaskHandler: Send + Sync {
    /// Handles dotfile deployment from `source` to `target`; `template`
    /// marks the file for template rendering.
    fn handle_dotfile(
        &self,
        source: &std::path::Path,
        target: &std::path::Path,
        template: bool,
    ) -> TaskResult;
    /// Handles package installation of `name` via the given `manager`.
    fn handle_package(&self, name: &str, manager: &str) -> TaskResult;
    /// Handles secret decryption from `source` into `target`.
    fn handle_secret(
        &self,
        source: &std::path::Path,
        target: &std::path::Path,
    ) -> TaskResult;
    /// Handles hook execution of the given shell `command`.
    fn handle_hook(&self, command: &str) -> TaskResult;
}
/// Executes tasks from a dependency graph.
pub struct Executor<H: TaskHandler> {
    // Graph describing the tasks and their ordering constraints.
    graph: DependencyGraph,
    // Handler performing the real work; Arc so it can be shared with
    // parallel workers.
    handler: Arc<H>,
    // When true, tasks are reported as successful without being executed.
    dry_run: bool,
}
impl<H: TaskHandler + 'static> Executor<H> {
    /// Creates a new executor over `graph`, dispatching work to `handler`.
    pub fn new(graph: DependencyGraph, handler: H) -> Self {
        Self {
            graph,
            handler: Arc::new(handler),
            dry_run: false,
        }
    }
    /// Sets dry run mode; when enabled, every task succeeds without running.
    pub fn dry_run(mut self, dry_run: bool) -> Self {
        self.dry_run = dry_run;
        self
    }
    /// Executes tasks sequentially in topological order.
    ///
    /// Stops at the first failing task and returns its error; the partial
    /// report up to that point is discarded.
    ///
    /// # Errors
    ///
    /// Returns a scheduler error if the graph has a cycle, or the first
    /// task error encountered.
    pub fn execute_sequential(&self) -> Result<ExecutionReport, ExecutionError> {
        let order = self
            .graph
            .topological_sort()
            .map_err(|e| ExecutionError::TaskFailed {
                task_id: "scheduler".to_string(),
                message: e,
            })?;
        let mut report = ExecutionReport::new();
        for task_id in order {
            if let Some(node) = self.graph.get_node(&task_id) {
                let result = self.execute_node(node);
                report.record(&task_id, result.is_ok());
                result?;
            }
        }
        Ok(report)
    }
    /// Executes tasks in parallel batches.
    ///
    /// Each batch is run concurrently via rayon; the whole failing batch
    /// still completes and is recorded, then the first error from that
    /// batch is returned and later batches are skipped.
    ///
    /// # Errors
    ///
    /// Returns a scheduler error if the graph has a cycle, or the first
    /// task error encountered.
    pub fn execute_parallel(&self) -> Result<ExecutionReport, ExecutionError> {
        let batches = self
            .graph
            .get_parallel_batches()
            .map_err(|e| ExecutionError::TaskFailed {
                task_id: "scheduler".to_string(),
                message: e,
            })?;
        // A plain Mutex suffices here: rayon closures only borrow it, and
        // the report is read back on this thread once the batches finish.
        // (Previously both the report and the error list were wrapped in
        // Arc<Mutex<…>> and unwrapped with Arc::try_unwrap — unnecessary,
        // since nothing outlives this function.)
        let report = Mutex::new(ExecutionReport::new());
        for batch in batches {
            let batch_errors: Vec<ExecutionError> = batch
                .par_iter()
                .filter_map(|task_id| {
                    let node = self.graph.get_node(task_id)?;
                    let result = self.execute_node(node);
                    report.lock().unwrap().record(task_id, result.is_ok());
                    result.err()
                })
                .collect();
            // Abort after the first failing batch, reporting its first error.
            if let Some(err) = batch_errors.into_iter().next() {
                return Err(err);
            }
        }
        Ok(report.into_inner().unwrap())
    }
    /// Dispatches a single node to the matching handler method.
    ///
    /// In dry-run mode every task is treated as an immediate success;
    /// `Custom` tasks are always a no-op.
    fn execute_node(&self, node: &Node) -> TaskResult {
        if self.dry_run {
            return Ok(());
        }
        match &node.data {
            TaskData::Dotfile {
                source,
                target,
                template,
            } => self.handler.handle_dotfile(source, target, *template),
            TaskData::Package { name, manager } => {
                self.handler.handle_package(name, manager)
            }
            TaskData::Secret { source, target } => {
                self.handler.handle_secret(source, target)
            }
            TaskData::Hook { command } => self.handler.handle_hook(command),
            TaskData::Custom(_) => Ok(()),
        }
    }
}
/// Execution result summary.
///
/// Collects which task ids succeeded and which failed during a run.
#[derive(Debug, Default)]
pub struct ExecutionReport {
    pub succeeded: Vec<String>,
    pub failed: Vec<String>,
}

impl ExecutionReport {
    /// Creates an empty report.
    pub fn new() -> Self {
        Self::default()
    }

    /// Records the outcome of a single task under its id.
    pub fn record(&mut self, task_id: &str, success: bool) {
        let bucket = if success {
            &mut self.succeeded
        } else {
            &mut self.failed
        };
        bucket.push(task_id.to_string());
    }

    /// Total number of recorded tasks (successes plus failures).
    pub fn total(&self) -> usize {
        self.failed.len() + self.succeeded.len()
    }

    /// Fraction of recorded tasks that succeeded.
    ///
    /// An empty report counts as fully successful (1.0).
    pub fn success_rate(&self) -> f64 {
        match self.total() {
            0 => 1.0,
            n => self.succeeded.len() as f64 / n as f64,
        }
    }
}

View file

@ -0,0 +1,11 @@
//! Task planning and execution.
pub mod dag;
pub mod executor;
pub mod scheduler;
pub use dag::DependencyGraph;
pub use executor::Executor;
pub use scheduler::{
validate_dotfile_targets, DotfileConflict, DotfileValidation, DotfileWarning, Scheduler,
};

View file

@ -0,0 +1,382 @@
//! Task scheduling from evaluation results.
use super::dag::{DependencyGraph, TaskData, TaskType};
use crate::evaluator::{DotfileConfig, EvalResult};
use std::path::Path;
/// Builds a dependency graph from evaluation results.
pub struct Scheduler {
    graph: DependencyGraph,
}

impl Scheduler {
    /// Creates an empty scheduler.
    pub fn new() -> Self {
        Scheduler {
            graph: DependencyGraph::new(),
        }
    }

    /// Creates a scheduler populated from the given evaluation results.
    ///
    /// Every dotfile, package, secret, and hook becomes one graph node whose
    /// id has the form `<kind>_<index>` (e.g. `dotfile_0`, `hook_2`).
    pub fn from_eval_result(result: &EvalResult) -> Self {
        let mut sched = Scheduler::new();
        for (idx, dotfile) in result.dotfiles.iter().enumerate() {
            sched.graph.add_node(
                format!("dotfile_{}", idx),
                TaskType::Dotfile,
                TaskData::Dotfile {
                    source: dotfile.source.clone(),
                    target: dotfile.target.clone(),
                    template: dotfile.template,
                },
            );
        }
        for (idx, package) in result.packages.iter().enumerate() {
            sched.graph.add_node(
                format!("package_{}", idx),
                TaskType::Package,
                TaskData::Package {
                    // Missing default package names collapse to the empty string.
                    name: package.default.clone().unwrap_or_default(),
                    manager: "default".to_string(),
                },
            );
        }
        for (idx, secret) in result.secrets.iter().enumerate() {
            sched.graph.add_node(
                format!("secret_{}", idx),
                TaskType::Secret,
                TaskData::Secret {
                    source: secret.source.clone(),
                    target: secret.target.clone(),
                },
            );
        }
        for (idx, hook) in result.hooks.iter().enumerate() {
            sched.graph.add_node(
                format!("hook_{}", idx),
                TaskType::Hook,
                TaskData::Hook {
                    command: hook.run.clone(),
                },
            );
        }
        sched
    }

    /// Consumes the scheduler and returns the built dependency graph.
    pub fn build_graph(self) -> DependencyGraph {
        self.graph
    }

    /// Returns task IDs in a valid sequential execution order.
    pub fn get_execution_order(&self) -> Result<Vec<String>, String> {
        self.graph.topological_sort()
    }

    /// Returns tasks grouped into batches that may run in parallel.
    pub fn get_parallel_batches(&self) -> Result<Vec<Vec<String>>, String> {
        self.graph.get_parallel_batches()
    }
}

impl Default for Scheduler {
    fn default() -> Self {
        Self::new()
    }
}
/// Conflict detected between dotfile entries.
///
/// All indices refer to positions in the slice passed to
/// `validate_dotfile_targets`.
#[derive(Debug, Clone)]
pub enum DotfileConflict {
    /// Same source and target (duplicate entry).
    Duplicate {
        /// Index of the earlier of the two duplicate entries.
        index_a: usize,
        /// Index of the later entry that duplicates it.
        index_b: usize,
    },
    /// Overlapping directories with no distinguishing settings (likely redundant).
    // NOTE(review): this variant is not constructed anywhere in this file —
    // redundant overlaps are currently reported as warnings instead. Confirm
    // whether it is produced elsewhere or is dead.
    RedundantOverlap {
        /// Index of the entry whose target directory contains the other.
        parent_index: usize,
        /// Index of the entry nested inside the parent's target.
        child_index: usize,
    },
}
/// Warning about dotfile configuration.
///
/// Non-fatal: deployment may proceed, but the message should be surfaced to
/// the user.
#[derive(Debug, Clone)]
pub struct DotfileWarning {
    /// Human-readable description of the suspicious configuration.
    pub message: String,
    /// Index of the first involved dotfile entry (the containing/parent one).
    pub index_a: usize,
    /// Index of the second involved dotfile entry (the contained/child one).
    pub index_b: usize,
}
/// Result of validating dotfile targets.
#[derive(Debug)]
pub struct DotfileValidation {
    /// Indices in dependency order (respecting target relationships).
    /// Each value indexes into the slice passed to `validate_dotfile_targets`.
    pub ordered_indices: Vec<usize>,
    /// Errors that prevent deployment.
    pub errors: Vec<DotfileConflict>,
    /// Warnings that should be shown to user.
    pub warnings: Vec<DotfileWarning>,
}
/// Validates dotfile targets and returns proper execution order.
///
/// Detects:
/// - Duplicate entries (same source + same target) → Error
/// - Same target with different source → OK, add dependency (later depends on earlier)
/// - Overlapping directories (both dirs, one target is ancestor) with same settings → Warning
/// - Overlapping directories with different settings → OK, add dependency
/// - Directory + file inside → OK, add dependency
pub fn validate_dotfile_targets(
    dotfiles: &[DotfileConfig],
    source_dir: &Path,
) -> DotfileValidation {
    let mut errors = Vec::new();
    let mut warnings = Vec::new();
    let mut graph = DependencyGraph::new();
    // Add all dotfiles as nodes; node ids "dotfile_<i>" mirror the slice index
    // so the sorted ids can be mapped back to indices below.
    for (i, dotfile) in dotfiles.iter().enumerate() {
        let id = format!("dotfile_{}", i);
        graph.add_node(
            id,
            TaskType::Dotfile,
            TaskData::Dotfile {
                source: dotfile.source.clone(),
                target: dotfile.target.clone(),
                template: dotfile.template,
            },
        );
    }
    // Check all pairs for conflicts. O(n^2) over the entry count, which is
    // expected to be small for a dotfiles config.
    for i in 0..dotfiles.len() {
        for j in (i + 1)..dotfiles.len() {
            let a = &dotfiles[i];
            let b = &dotfiles[j];
            let target_a = &a.target;
            let target_b = &b.target;
            // Check for same exact target
            if target_a == target_b {
                if a.source == b.source {
                    // Same source + same target = duplicate
                    errors.push(DotfileConflict::Duplicate {
                        index_a: i,
                        index_b: j,
                    });
                } else {
                    // Different source + same target = override, j depends on i
                    graph.add_edge(&format!("dotfile_{}", i), &format!("dotfile_{}", j));
                }
                continue;
            }
            // Check if one target is ancestor of the other.
            // Path::starts_with is component-wise, so "/a/bc" is NOT under "/a/b".
            let a_is_ancestor = target_b.starts_with(target_a) && target_a != target_b;
            let b_is_ancestor = target_a.starts_with(target_b) && target_a != target_b;
            if a_is_ancestor {
                // a's target is ancestor of b's target, so a must run first
                let full_source_a = source_dir.join(&a.source);
                let full_source_b = source_dir.join(&b.source);
                // Warn only when both sources exist as directories on disk;
                // a missing source silently skips the redundancy check.
                let both_dirs = full_source_a.is_dir() && full_source_b.is_dir();
                if both_dirs && is_redundant_overlap(a, b) {
                    warnings.push(DotfileWarning {
                        message: format!(
                            "overlapping directories with same settings: '{}' contains '{}'",
                            a.source.display(),
                            b.source.display()
                        ),
                        index_a: i,
                        index_b: j,
                    });
                }
                // Add edge: a runs before b
                graph.add_edge(&format!("dotfile_{}", i), &format!("dotfile_{}", j));
            } else if b_is_ancestor {
                // b's target is ancestor of a's target, so b must run first
                let full_source_a = source_dir.join(&a.source);
                let full_source_b = source_dir.join(&b.source);
                let both_dirs = full_source_a.is_dir() && full_source_b.is_dir();
                if both_dirs && is_redundant_overlap(b, a) {
                    warnings.push(DotfileWarning {
                        message: format!(
                            "overlapping directories with same settings: '{}' contains '{}'",
                            b.source.display(),
                            a.source.display()
                        ),
                        index_a: j,
                        index_b: i,
                    });
                }
                // Add edge: b runs before a
                graph.add_edge(&format!("dotfile_{}", j), &format!("dotfile_{}", i));
            }
        }
    }
    // Get execution order via topological sort and map node ids back to
    // slice indices. The relative order of unrelated entries depends on the
    // graph's tie-breaking, which the tests rely on.
    let ordered_indices = match graph.topological_sort() {
        Ok(ids) => ids
            .into_iter()
            .filter_map(|id| id.strip_prefix("dotfile_").and_then(|s| s.parse().ok()))
            .collect(),
        Err(_) => {
            // Cycle detected - shouldn't happen with our edge rules, but fallback to original order
            (0..dotfiles.len()).collect()
        }
    };
    DotfileValidation {
        ordered_indices,
        errors,
        warnings,
    }
}
/// Checks if the child dotfile has no distinguishing settings from parent.
///
/// A child nested under a parent directory is "redundant" when it adds no
/// permissions, owner, templating, deploy-mode change, or pattern overrides.
fn is_redundant_overlap(parent: &DotfileConfig, child: &DotfileConfig) -> bool {
    let has_own_settings = !child.permissions.is_empty()
        || child.owner.is_some()
        || child.template
        || child.deploy != parent.deploy
        || !child.link_patterns.is_empty()
        || !child.copy_patterns.is_empty();
    !has_own_settings
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::evaluator::DeployMode;
    use std::path::PathBuf;
    use tempfile::TempDir;

    /// Builds a minimal `DotfileConfig`: copy mode, no template, and no
    /// permissions/owner/pattern overrides.
    fn make_dotfile(source: &str, target: &str) -> DotfileConfig {
        DotfileConfig {
            source: PathBuf::from(source),
            target: PathBuf::from(target),
            template: false,
            permissions: Vec::new(),
            owner: None,
            deploy: DeployMode::Copy,
            link_patterns: Vec::new(),
            copy_patterns: Vec::new(),
        }
    }

    // Identical source + target pairs must be rejected as duplicates.
    #[test]
    fn test_duplicate_entry_error() {
        let temp = TempDir::new().unwrap();
        let dotfiles = vec![
            make_dotfile("config/app.conf", "/home/user/.config/app.conf"),
            make_dotfile("config/app.conf", "/home/user/.config/app.conf"),
        ];
        let result = validate_dotfile_targets(&dotfiles, temp.path());
        assert_eq!(result.errors.len(), 1);
        match &result.errors[0] {
            DotfileConflict::Duplicate { index_a, index_b } => {
                assert_eq!(*index_a, 0);
                assert_eq!(*index_b, 1);
            }
            _ => panic!("expected Duplicate error"),
        }
    }

    // Same target from different sources is an override, not an error; the
    // later entry must run after the earlier one.
    #[test]
    fn test_same_target_different_source_ok() {
        let temp = TempDir::new().unwrap();
        let dotfiles = vec![
            make_dotfile("config/app.conf", "/home/user/.config/app.conf"),
            make_dotfile("templates/app.conf", "/home/user/.config/app.conf"),
        ];
        let result = validate_dotfile_targets(&dotfiles, temp.path());
        assert!(result.errors.is_empty());
        // Second entry should come after first
        assert_eq!(result.ordered_indices, vec![0, 1]);
    }

    // A directory entry must be deployed before a file nested inside it,
    // regardless of declaration order.
    #[test]
    fn test_directory_file_override_ordering() {
        let temp = TempDir::new().unwrap();
        // Create a directory to simulate real filesystem
        std::fs::create_dir_all(temp.path().join("config/nvim")).unwrap();
        std::fs::write(temp.path().join("config/nvim/init.lua"), "").unwrap();
        let mut file_dotfile = make_dotfile("config/nvim/init.lua", "/home/user/.config/nvim/init.lua");
        file_dotfile.template = true;
        let dotfiles = vec![
            // File with template (declared first)
            file_dotfile,
            // Directory (declared second)
            make_dotfile("config/nvim", "/home/user/.config/nvim"),
        ];
        let result = validate_dotfile_targets(&dotfiles, temp.path());
        assert!(result.errors.is_empty());
        // Directory should run first (index 1), then file (index 0)
        assert_eq!(result.ordered_indices, vec![1, 0]);
    }

    // Nested directories with distinguishing settings (here: owner) are
    // intentional and must not warn.
    #[test]
    fn test_overlapping_dirs_with_different_settings_no_warning() {
        let temp = TempDir::new().unwrap();
        // Create directories
        std::fs::create_dir_all(temp.path().join("config/nvim/lua")).unwrap();
        let mut child_dotfile = make_dotfile("config/nvim/lua", "/home/user/.config/nvim/lua");
        child_dotfile.owner = Some("root".to_string());
        let dotfiles = vec![
            make_dotfile("config/nvim", "/home/user/.config/nvim"),
            child_dotfile,
        ];
        let result = validate_dotfile_targets(&dotfiles, temp.path());
        assert!(result.errors.is_empty());
        assert!(result.warnings.is_empty()); // No warning because child has different settings
    }

    // Nested directories with identical settings are likely redundant and
    // should produce a warning (but still no error).
    #[test]
    fn test_overlapping_dirs_same_settings_warning() {
        let temp = TempDir::new().unwrap();
        // Create directories
        std::fs::create_dir_all(temp.path().join("config/nvim/lua")).unwrap();
        let dotfiles = vec![
            make_dotfile("config/nvim", "/home/user/.config/nvim"),
            make_dotfile("config/nvim/lua", "/home/user/.config/nvim/lua"),
        ];
        let result = validate_dotfile_targets(&dotfiles, temp.path());
        assert!(result.errors.is_empty());
        assert_eq!(result.warnings.len(), 1);
        assert!(result.warnings[0].message.contains("overlapping directories"));
    }
}

View file

@ -0,0 +1,820 @@
//! Static type checker for the doot language.
use crate::ast::*;
use crate::types::*;
use ariadne::{Color, Label, Report, ReportKind, Source};
use std::collections::HashMap;
use thiserror::Error;
/// Type checking errors.
///
/// Every variant carries the byte-offset span of the offending code in the
/// original source so that [`TypeError::report`] can point at it.
#[derive(Error, Debug)]
pub enum TypeError {
    /// A name was referenced that is bound in no visible scope (and is not a
    /// known function).
    #[error("undefined variable: {0}")]
    UndefinedVariable(String, std::ops::Range<usize>),
    /// A struct or enum name was referenced that has not been declared.
    #[error("undefined type: {0}")]
    UndefinedType(String, std::ops::Range<usize>),
    /// A value's inferred type is incompatible with the expected type.
    #[error("type mismatch: expected {expected}, got {got}")]
    TypeMismatch {
        expected: String,
        got: String,
        span: std::ops::Range<usize>,
    },
    /// A call expression whose callee is not a function type.
    #[error("cannot call non-function type: {0}")]
    NotCallable(String, std::ops::Range<usize>),
    /// A field or method access that the target type does not provide.
    #[error("field {field} not found on type {ty}")]
    FieldNotFound {
        ty: String,
        field: String,
        span: std::ops::Range<usize>,
    },
    /// A call with the wrong number of arguments.
    #[error("wrong number of arguments: expected {expected}, got {got}")]
    WrongArity {
        expected: usize,
        got: usize,
        span: std::ops::Range<usize>,
    },
}
impl TypeError {
/// Prints a formatted error report to stderr.
pub fn report(&self, source: &str, filename: &str) {
let (msg, span) = match self {
TypeError::UndefinedVariable(name, span) => {
(format!("undefined variable: {}", name), span.clone())
}
TypeError::UndefinedType(name, span) => {
(format!("undefined type: {}", name), span.clone())
}
TypeError::TypeMismatch { expected, got, span } => {
(format!("expected {}, got {}", expected, got), span.clone())
}
TypeError::NotCallable(ty, span) => {
(format!("cannot call non-function type: {}", ty), span.clone())
}
TypeError::FieldNotFound { ty, field, span } => {
(format!("field {} not found on {}", field, ty), span.clone())
}
TypeError::WrongArity { expected, got, span } => {
(format!("expected {} arguments, got {}", expected, got), span.clone())
}
};
Report::build(ReportKind::Error, filename, span.start)
.with_message(self.to_string())
.with_label(
Label::new((filename, span))
.with_message(msg)
.with_color(Color::Red),
)
.finish()
.print((filename, Source::from(source)))
.ok();
}
}
/// Static type checker.
///
/// Walks a parsed `Program`, maintaining a scoped `TypeEnv` and accumulating
/// every `TypeError` it finds instead of stopping at the first one.
pub struct TypeChecker {
    /// Scoped variable/function/struct/enum bindings, pre-seeded with built-ins.
    env: TypeEnv,
    /// Errors accumulated during checking; drained and returned by `check`.
    errors: Vec<TypeError>,
}
impl TypeChecker {
    /// Creates a new type checker with built-in types.
    pub fn new() -> Self {
        Self {
            env: TypeEnv::new(),
            errors: Vec::new(),
        }
    }

    /// Type checks a program, returning all collected errors if any.
    ///
    /// Statements are checked in order, so later statements see bindings made
    /// by earlier ones. Errors accumulate rather than aborting on the first.
    pub fn check(&mut self, program: &Program) -> Result<(), Vec<TypeError>> {
        for stmt in &program.statements {
            self.check_statement(stmt);
        }
        if self.errors.is_empty() {
            Ok(())
        } else {
            Err(std::mem::take(&mut self.errors))
        }
    }

    /// Checks a single statement, recording any errors into `self.errors`.
    fn check_statement(&mut self, stmt: &Spanned<Statement>) {
        match &stmt.node {
            Statement::VarDecl(decl) => {
                let inferred = self.infer_expr(&decl.value, &stmt.span);
                if let Some(ref ty_ann) = decl.ty {
                    // An explicit annotation wins: the binding gets the
                    // annotated type even if the initializer disagrees (the
                    // mismatch is still reported).
                    let expected = self.resolve_type(ty_ann);
                    if !expected.is_compatible(&inferred) {
                        self.errors.push(TypeError::TypeMismatch {
                            expected: expected.display(),
                            got: inferred.display(),
                            span: stmt.span.clone(),
                        });
                    }
                    self.env.define(decl.name.clone(), expected);
                } else {
                    self.env.define(decl.name.clone(), inferred);
                }
            }
            Statement::FnDecl(decl) => {
                let params: Vec<(String, Type)> = decl
                    .params
                    .iter()
                    .map(|p| (p.name.clone(), self.resolve_type(&p.ty)))
                    .collect();
                let return_type = decl
                    .return_type
                    .as_ref()
                    .map(|t| self.resolve_type(t))
                    .unwrap_or(Type::None);
                // Register the signature before checking the body so that
                // recursive calls resolve.
                self.env.define_function(
                    decl.name.clone(),
                    FunctionType {
                        params: params.clone(),
                        return_type: return_type.clone(),
                        is_async: decl.is_async,
                    },
                );
                self.env.push_scope();
                for (name, ty) in params {
                    self.env.define(name, ty);
                }
                // A leading `self` parameter (method context) needs no special
                // handling here; it is bound like any other parameter above.
                for body_stmt in &decl.body {
                    self.check_statement(body_stmt);
                }
                self.env.pop_scope();
            }
            Statement::StructDecl(decl) => {
                let mut fields = HashMap::new();
                for field in &decl.fields {
                    let ty = self.resolve_type(&field.ty);
                    fields.insert(field.name.clone(), ty);
                }
                let mut methods = HashMap::new();
                for method in &decl.methods {
                    let params: Vec<(String, Type)> = method
                        .params
                        .iter()
                        .map(|p| (p.name.clone(), self.resolve_type(&p.ty)))
                        .collect();
                    let return_type = method
                        .return_type
                        .as_ref()
                        .map(|t| self.resolve_type(t))
                        .unwrap_or(Type::None);
                    methods.insert(
                        method.name.clone(),
                        FunctionType {
                            params,
                            return_type,
                            is_async: method.is_async,
                        },
                    );
                }
                self.env.define_struct(
                    decl.name.clone(),
                    StructType {
                        name: decl.name.clone(),
                        fields,
                        methods,
                    },
                );
            }
            Statement::EnumDecl(decl) => {
                let mut variants = HashMap::new();
                for variant in &decl.variants {
                    // `None` fields = unit variant; `Some(tys)` = payload variant.
                    let fields = variant
                        .fields
                        .as_ref()
                        .map(|fs| fs.iter().map(|t| self.resolve_type(t)).collect());
                    variants.insert(variant.name.clone(), fields);
                }
                self.env.define_enum(
                    decl.name.clone(),
                    EnumType {
                        name: decl.name.clone(),
                        variants,
                    },
                );
            }
            Statement::TypeAlias(alias) => {
                // Aliases are stored as ordinary bindings in the value
                // environment rather than a dedicated alias table.
                let ty = self.resolve_type(&alias.ty);
                self.env.define(alias.name.clone(), ty);
            }
            Statement::ForLoop(for_loop) => {
                let iter_ty = self.infer_expr(&for_loop.iter, &stmt.span);
                // Element type: list yields its element type, a string yields
                // strings; anything else degrades to `any`.
                let elem_ty = match iter_ty {
                    Type::List(inner) => *inner,
                    Type::Str => Type::Str,
                    _ => Type::Any,
                };
                self.env.push_scope();
                self.env.define(for_loop.var.clone(), elem_ty);
                for body_stmt in &for_loop.body {
                    self.check_statement(body_stmt);
                }
                self.env.pop_scope();
            }
            Statement::If(if_stmt) => {
                let cond_ty = self.infer_expr(&if_stmt.condition, &stmt.span);
                if !cond_ty.is_compatible(&Type::Bool) {
                    self.errors.push(TypeError::TypeMismatch {
                        expected: "bool".to_string(),
                        got: cond_ty.display(),
                        span: stmt.span.clone(),
                    });
                }
                self.env.push_scope();
                for body_stmt in &if_stmt.then_body {
                    self.check_statement(body_stmt);
                }
                self.env.pop_scope();
                if let Some(ref else_body) = if_stmt.else_body {
                    self.env.push_scope();
                    for body_stmt in else_body {
                        self.check_statement(body_stmt);
                    }
                    self.env.pop_scope();
                }
            }
            Statement::Dotfile(dotfile) => {
                // Source/target are inferred only to surface errors inside
                // them; their resulting types are unconstrained here.
                self.infer_expr(&dotfile.source, &stmt.span);
                self.infer_expr(&dotfile.target, &stmt.span);
                if let Some(ref when) = dotfile.when {
                    let when_ty = self.infer_expr(when, &stmt.span);
                    if !when_ty.is_compatible(&Type::Bool) {
                        self.errors.push(TypeError::TypeMismatch {
                            expected: "bool".to_string(),
                            got: when_ty.display(),
                            span: stmt.span.clone(),
                        });
                    }
                }
            }
            Statement::Package(pkg) => {
                // Package names are converted to strings at runtime, so skip type checking
                // for the default value. Only check the 'when' condition if present.
                if let Some(ref when) = pkg.when {
                    let when_ty = self.infer_expr(when, &stmt.span);
                    if !when_ty.is_compatible(&Type::Bool) {
                        self.errors.push(TypeError::TypeMismatch {
                            expected: "bool".to_string(),
                            got: when_ty.display(),
                            span: stmt.span.clone(),
                        });
                    }
                }
            }
            Statement::Expr(expr) => {
                self.infer_expr(expr, &stmt.span);
            }
            // Remaining statement kinds carry nothing to type check here.
            _ => {}
        }
    }

    /// Infers the type of an expression, recording errors as a side effect.
    ///
    /// `span` is the enclosing statement's span; sub-expressions do not carry
    /// their own spans, so all errors point at the whole statement.
    fn infer_expr(&mut self, expr: &Expr, span: &std::ops::Range<usize>) -> Type {
        match expr {
            Expr::Literal(lit) => match lit {
                Literal::Int(_) => Type::Int,
                Literal::Float(_) => Type::Float,
                Literal::Str(_) => Type::Str,
                Literal::Bool(_) => Type::Bool,
                Literal::None => Type::None,
            },
            Expr::Ident(name) => {
                // Variables shadow functions; a bare function name evaluates
                // to its function type.
                if let Some(ty) = self.env.lookup(name) {
                    ty.clone()
                } else if let Some(ft) = self.env.functions.get(name) {
                    Type::Function(
                        ft.params.iter().map(|(_, t)| t.clone()).collect(),
                        Box::new(ft.return_type.clone()),
                    )
                } else {
                    self.errors.push(TypeError::UndefinedVariable(name.clone(), span.clone()));
                    Type::Unknown
                }
            }
            Expr::Binary(left, op, right) => {
                let left_ty = self.infer_expr(left, span);
                let right_ty = self.infer_expr(right, span);
                match op {
                    BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div | BinOp::Mod => {
                        if left_ty.is_numeric() && right_ty.is_numeric() {
                            // Mixed int/float arithmetic promotes to float.
                            if matches!(left_ty, Type::Float) || matches!(right_ty, Type::Float) {
                                Type::Float
                            } else {
                                Type::Int
                            }
                        } else if matches!(op, BinOp::Add)
                            && (left_ty.is_compatible(&Type::Str) || right_ty.is_compatible(&Type::Str))
                        {
                            // `+` doubles as string concatenation.
                            Type::Str
                        } else {
                            self.errors.push(TypeError::TypeMismatch {
                                expected: "numeric".to_string(),
                                got: format!("{} and {}", left_ty.display(), right_ty.display()),
                                span: span.clone(),
                            });
                            Type::Unknown
                        }
                    }
                    // Comparisons accept any operand types here.
                    BinOp::Eq | BinOp::NotEq | BinOp::Lt | BinOp::Gt | BinOp::LtEq | BinOp::GtEq => {
                        Type::Bool
                    }
                    BinOp::And | BinOp::Or => {
                        if !left_ty.is_compatible(&Type::Bool) {
                            self.errors.push(TypeError::TypeMismatch {
                                expected: "bool".to_string(),
                                got: left_ty.display(),
                                span: span.clone(),
                            });
                        }
                        if !right_ty.is_compatible(&Type::Bool) {
                            self.errors.push(TypeError::TypeMismatch {
                                expected: "bool".to_string(),
                                got: right_ty.display(),
                                span: span.clone(),
                            });
                        }
                        Type::Bool
                    }
                    BinOp::PathJoin => Type::Path,
                    BinOp::NullCoalesce => {
                        // `a ?? b`: unwraps the optional on the left when the
                        // fallback matches the inner type; otherwise takes the
                        // fallback's type.
                        if let Type::Optional(inner) = left_ty {
                            if inner.is_compatible(&right_ty) {
                                *inner
                            } else {
                                right_ty
                            }
                        } else {
                            left_ty
                        }
                    }
                }
            }
            Expr::Unary(op, expr) => {
                let ty = self.infer_expr(expr, span);
                match op {
                    UnaryOp::Neg => {
                        if !ty.is_numeric() {
                            self.errors.push(TypeError::TypeMismatch {
                                expected: "numeric".to_string(),
                                got: ty.display(),
                                span: span.clone(),
                            });
                        }
                        // Negation preserves the operand's (numeric) type.
                        ty
                    }
                    UnaryOp::Not => {
                        if !ty.is_compatible(&Type::Bool) {
                            self.errors.push(TypeError::TypeMismatch {
                                expected: "bool".to_string(),
                                got: ty.display(),
                                span: span.clone(),
                            });
                        }
                        Type::Bool
                    }
                }
            }
            Expr::Call(callee, args) => {
                // Check for built-in functions first (before inferring callee type)
                if let Expr::Ident(name) = callee.as_ref() {
                    let builtin_ty = self.infer_builtin_call(name, args, span);
                    if builtin_ty != Type::Unknown {
                        return builtin_ty;
                    }
                }
                let callee_ty = self.infer_expr(callee, span);
                match callee_ty {
                    Type::Function(params, ret) => {
                        if params.len() != args.len() {
                            self.errors.push(TypeError::WrongArity {
                                expected: params.len(),
                                got: args.len(),
                                span: span.clone(),
                            });
                        }
                        for (arg, param_ty) in args.iter().zip(params.iter()) {
                            let arg_ty = self.infer_expr(arg, span);
                            if !arg_ty.is_compatible(param_ty) {
                                self.errors.push(TypeError::TypeMismatch {
                                    expected: param_ty.display(),
                                    got: arg_ty.display(),
                                    span: span.clone(),
                                });
                            }
                        }
                        *ret
                    }
                    // Calling something of unknown/any type is not an error;
                    // the result is unconstrained.
                    Type::Unknown | Type::Any => Type::Any,
                    _ => {
                        self.errors.push(TypeError::NotCallable(callee_ty.display(), span.clone()));
                        Type::Unknown
                    }
                }
            }
            Expr::MethodCall(obj, method, args) => {
                let obj_ty = self.infer_expr(obj, span);
                match obj_ty {
                    Type::Struct(ref st) => {
                        if let Some(ft) = st.methods.get(method) {
                            // Skip the implicit `self` parameter when pairing
                            // arguments. NOTE(review): arity is not checked
                            // for method calls — confirm whether intentional.
                            for (arg, (_, param_ty)) in args.iter().zip(ft.params.iter().skip(1)) {
                                let arg_ty = self.infer_expr(arg, span);
                                if !arg_ty.is_compatible(param_ty) {
                                    self.errors.push(TypeError::TypeMismatch {
                                        expected: param_ty.display(),
                                        got: arg_ty.display(),
                                        span: span.clone(),
                                    });
                                }
                            }
                            ft.return_type.clone()
                        } else {
                            self.errors.push(TypeError::FieldNotFound {
                                ty: st.name.clone(),
                                field: method.clone(),
                                span: span.clone(),
                            });
                            Type::Unknown
                        }
                    }
                    Type::List(_) => self.infer_list_method(method, args, span),
                    Type::Str => self.infer_str_method(method, args, span),
                    _ => Type::Any,
                }
            }
            Expr::Field(obj, field) => {
                let obj_ty = self.infer_expr(obj, span);
                match obj_ty {
                    Type::Struct(st) => {
                        if let Some(field_ty) = st.fields.get(field) {
                            field_ty.clone()
                        } else {
                            self.errors.push(TypeError::FieldNotFound {
                                ty: st.name.clone(),
                                field: field.clone(),
                                span: span.clone(),
                            });
                            Type::Unknown
                        }
                    }
                    _ => Type::Any,
                }
            }
            Expr::Index(obj, idx) => {
                let obj_ty = self.infer_expr(obj, span);
                let idx_ty = self.infer_expr(idx, span);
                match obj_ty {
                    Type::List(inner) => {
                        if !idx_ty.is_compatible(&Type::Int) {
                            self.errors.push(TypeError::TypeMismatch {
                                expected: "int".to_string(),
                                got: idx_ty.display(),
                                span: span.clone(),
                            });
                        }
                        *inner
                    }
                    // Indexing a string yields a string (no char type).
                    Type::Str => Type::Str,
                    _ => Type::Any,
                }
            }
            Expr::List(items) => {
                if items.is_empty() {
                    Type::List(Box::new(Type::Any))
                } else {
                    // Lists are homogeneous: the first element fixes the
                    // element type and later elements must be compatible.
                    let first_ty = self.infer_expr(&items[0], span);
                    for item in items.iter().skip(1) {
                        let item_ty = self.infer_expr(item, span);
                        if !item_ty.is_compatible(&first_ty) {
                            self.errors.push(TypeError::TypeMismatch {
                                expected: first_ty.display(),
                                got: item_ty.display(),
                                span: span.clone(),
                            });
                        }
                    }
                    Type::List(Box::new(first_ty))
                }
            }
            Expr::EnumVariant(enum_name, _variant) => {
                // NOTE(review): the variant name itself is not validated
                // against the enum's declared variants — confirm if desired.
                if let Some(et) = self.env.enums.get(enum_name) {
                    Type::Enum(et.clone())
                } else {
                    self.errors.push(TypeError::UndefinedType(enum_name.clone(), span.clone()));
                    Type::Unknown
                }
            }
            Expr::StructInit(struct_name, fields) => {
                if let Some(st) = self.env.structs.get(struct_name).cloned() {
                    // Check provided fields; missing fields are not reported.
                    for (field_name, field_expr) in fields {
                        if let Some(expected_ty) = st.fields.get(field_name) {
                            let actual_ty = self.infer_expr(field_expr, span);
                            if !actual_ty.is_compatible(expected_ty) {
                                self.errors.push(TypeError::TypeMismatch {
                                    expected: expected_ty.display(),
                                    got: actual_ty.display(),
                                    span: span.clone(),
                                });
                            }
                        } else {
                            self.errors.push(TypeError::FieldNotFound {
                                ty: struct_name.clone(),
                                field: field_name.clone(),
                                span: span.clone(),
                            });
                        }
                    }
                    Type::Struct(st)
                } else {
                    self.errors.push(TypeError::UndefinedType(struct_name.clone(), span.clone()));
                    Type::Unknown
                }
            }
            Expr::If(cond, then_expr, else_expr) => {
                let cond_ty = self.infer_expr(cond, span);
                if !cond_ty.is_compatible(&Type::Bool) {
                    self.errors.push(TypeError::TypeMismatch {
                        expected: "bool".to_string(),
                        got: cond_ty.display(),
                        span: span.clone(),
                    });
                }
                let then_ty = self.infer_expr(then_expr, span);
                if let Some(else_expr) = else_expr {
                    // Diverging branches form a union type rather than erroring.
                    let else_ty = self.infer_expr(else_expr, span);
                    if then_ty.is_compatible(&else_ty) {
                        then_ty
                    } else {
                        Type::Union(vec![then_ty, else_ty])
                    }
                } else {
                    // No else branch: the expression may produce no value.
                    Type::Optional(Box::new(then_ty))
                }
            }
            Expr::Lambda(params, body) => {
                self.env.push_scope();
                let param_types: Vec<Type> = params
                    .iter()
                    .map(|p| {
                        let ty = self.resolve_type(&p.ty);
                        self.env.define(p.name.clone(), ty.clone());
                        ty
                    })
                    .collect();
                let return_ty = self.infer_expr(body, span);
                self.env.pop_scope();
                Type::Function(param_types, Box::new(return_ty))
            }
            // `await` is transparent to types here: it yields the awaited
            // expression's type unchanged.
            Expr::Await(expr) => self.infer_expr(expr, span),
            Expr::Path(left, right) => {
                // Components are inferred for error reporting only; a path
                // expression is always a path.
                self.infer_expr(left, span);
                self.infer_expr(right, span);
                Type::Path
            }
            Expr::HomePath(_) => Type::Path,
            Expr::Interpolated(parts) => {
                // Embedded expressions may be of any type; interpolation
                // always produces a string.
                for part in parts {
                    if let InterpolatedPart::Expr(expr) = part {
                        self.infer_expr(expr, span);
                    }
                }
                Type::Str
            }
        }
    }

    /// Returns the result type of a built-in function call, or `Type::Unknown`
    /// when `name` is not a built-in (so normal lookup continues).
    ///
    /// NOTE(review): most branches do not infer the argument expressions, so
    /// errors inside built-in call arguments go unreported — confirm whether
    /// that is intentional.
    fn infer_builtin_call(
        &mut self,
        name: &str,
        args: &[Expr],
        span: &std::ops::Range<usize>,
    ) -> Type {
        match name {
            "map" | "filter" => {
                if !args.is_empty() {
                    let list_ty = self.infer_expr(&args[0], span);
                    if let Type::List(inner) = list_ty {
                        // `filter` preserves the element type; `map`'s result
                        // element type is unknown without the closure's type.
                        if name == "filter" {
                            return Type::List(inner);
                        }
                        return Type::List(Box::new(Type::Any));
                    }
                }
                Type::List(Box::new(Type::Any))
            }
            "fold" => Type::Any,
            "len" => Type::Int,
            "first" | "last" => {
                if !args.is_empty() {
                    let list_ty = self.infer_expr(&args[0], span);
                    if let Type::List(inner) = list_ty {
                        return Type::Optional(inner);
                    }
                }
                Type::Optional(Box::new(Type::Any))
            }
            "contains" => Type::Bool,
            "join" | "upper" | "lower" | "trim" | "replace" | "format" => Type::Str,
            "split" => Type::List(Box::new(Type::Str)),
            "starts_with" | "ends_with" => Type::Bool,
            "read_file" | "read_file_lines" => Type::Str,
            "file_exists" | "dir_exists" | "is_symlink" => Type::Bool,
            "list_dir" | "glob" | "walk_dir" => Type::List(Box::new(Type::Path)),
            "home" | "config_dir" | "config_path" | "data_dir" | "cache_dir" | "temp_dir" | "temp_file" => Type::Path,
            "path_join" | "path_parent" | "path_filename" | "path_extension" | "read_link" => Type::Path,
            "fetch" | "fetch_json" | "fetch_bytes" | "post" | "post_json" => Type::Any,
            "download" => Type::Bool,
            "exec" | "shell" => Type::Str,
            "exec_with_status" => Type::Int,
            "which" => Type::Optional(Box::new(Type::Path)),
            "to_json" | "to_toml" | "to_yaml" => Type::Str,
            "from_json" | "from_toml" | "from_yaml" => Type::Any,
            "hash_file" | "hash_str" => Type::Str,
            "encrypt_age" | "decrypt_age" => Type::Str,
            "env" => Type::Optional(Box::new(Type::Str)),
            "unwrap" => {
                if !args.is_empty() {
                    let opt_ty = self.infer_expr(&args[0], span);
                    if let Type::Optional(inner) = opt_ty {
                        return *inner;
                    }
                }
                Type::Any
            }
            "unwrap_or" => {
                // The fallback value determines the result type.
                if args.len() >= 2 {
                    self.infer_expr(&args[1], span)
                } else {
                    Type::Any
                }
            }
            "is_some" | "is_none" => Type::Bool,
            "all" | "race" => Type::Any,
            "seq" | "batch" => {
                if !args.is_empty() {
                    self.infer_expr(&args[0], span)
                } else {
                    Type::Any
                }
            }
            // These preserve the (list) type of their first argument.
            "flatten" | "concat" | "unique" | "sort" | "reverse" => {
                if !args.is_empty() {
                    self.infer_expr(&args[0], span)
                } else {
                    Type::List(Box::new(Type::Any))
                }
            }
            "zip" | "enumerate" => Type::List(Box::new(Type::Any)),
            "sort_by" => {
                if !args.is_empty() {
                    self.infer_expr(&args[0], span)
                } else {
                    Type::List(Box::new(Type::Any))
                }
            }
            // Debug/print functions return None
            "print" | "println" => Type::None,
            "dbg" => {
                // dbg returns the last argument for chaining
                if let Some(last) = args.last() {
                    self.infer_expr(last, span)
                } else {
                    Type::None
                }
            }
            // Not a builtin - return Unknown so normal lookup continues
            _ => Type::Unknown,
        }
    }

    /// Result type of a method called on a list value. Arguments are not
    /// inspected; element types are not tracked through these methods.
    fn infer_list_method(
        &mut self,
        method: &str,
        _args: &[Expr],
        _span: &std::ops::Range<usize>,
    ) -> Type {
        match method {
            "len" => Type::Int,
            "first" | "last" => Type::Optional(Box::new(Type::Any)),
            "contains" => Type::Bool,
            "map" | "filter" | "sort" | "reverse" | "unique" => Type::List(Box::new(Type::Any)),
            "fold" => Type::Any,
            "join" => Type::Str,
            _ => Type::Any,
        }
    }

    /// Result type of a method called on a string value.
    fn infer_str_method(
        &mut self,
        method: &str,
        _args: &[Expr],
        _span: &std::ops::Range<usize>,
    ) -> Type {
        match method {
            "len" => Type::Int,
            "upper" | "lower" | "trim" | "replace" => Type::Str,
            "split" => Type::List(Box::new(Type::Str)),
            "starts_with" | "ends_with" | "contains" => Type::Bool,
            _ => Type::Any,
        }
    }

    /// Resolves a syntactic type annotation to a semantic `Type`.
    ///
    /// Unknown simple names resolve to `Type::Unknown` (no error is recorded
    /// here); literal annotations resolve to the literal's base type.
    fn resolve_type(&self, ty: &TypeAnnotation) -> Type {
        match ty {
            TypeAnnotation::Simple(name) => match name.as_str() {
                "int" => Type::Int,
                "float" => Type::Float,
                "str" => Type::Str,
                "bool" => Type::Bool,
                "path" => Type::Path,
                "any" => Type::Any,
                _ => {
                    if let Some(st) = self.env.structs.get(name) {
                        Type::Struct(st.clone())
                    } else if let Some(et) = self.env.enums.get(name) {
                        Type::Enum(et.clone())
                    } else {
                        Type::Unknown
                    }
                }
            },
            TypeAnnotation::List(inner) => Type::List(Box::new(self.resolve_type(inner))),
            TypeAnnotation::Optional(inner) => Type::Optional(Box::new(self.resolve_type(inner))),
            TypeAnnotation::Function(params, ret) => Type::Function(
                params.iter().map(|p| self.resolve_type(p)).collect(),
                Box::new(self.resolve_type(ret)),
            ),
            TypeAnnotation::Union(types) => {
                Type::Union(types.iter().map(|t| self.resolve_type(t)).collect())
            }
            TypeAnnotation::Literal(lit) => match lit {
                Literal::Str(_) => Type::Str,
                Literal::Int(_) => Type::Int,
                Literal::Float(_) => Type::Float,
                Literal::Bool(_) => Type::Bool,
                Literal::None => Type::None,
            },
        }
    }
}
impl Default for TypeChecker {
fn default() -> Self {
Self::new()
}
}

View file

@ -0,0 +1,192 @@
//! Type system for the doot language.
use std::collections::HashMap;
/// Runtime and static types in doot.
#[derive(Clone, Debug, PartialEq)]
pub enum Type {
    Int,
    Float,
    Str,
    Bool,
    Path,
    /// The absent/unit value type.
    None,
    /// Homogeneous list with the given element type.
    List(Box<Type>),
    /// A value that may be the inner type or `none`.
    Optional(Box<Type>),
    /// Function type: parameter types and return type.
    Function(Vec<Type>, Box<Type>),
    Struct(StructType),
    Enum(EnumType),
    /// A value belonging to any one of the member types.
    Union(Vec<Type>),
    /// Dynamic wildcard: compatible with everything.
    Any,
    /// Placeholder for unresolved or erroneous types; also compatible with
    /// everything so one error does not cascade.
    Unknown,
}
impl Type {
    /// Returns true if this is an int or float type.
    pub fn is_numeric(&self) -> bool {
        matches!(self, Type::Int | Type::Float)
    }

    /// Checks if this type can be used where `other` is expected.
    ///
    /// Compatibility is symmetric and deliberately permissive: `any`/`unknown`
    /// match everything, int/float and str/path are interchangeable, `none`
    /// inhabits every optional, and an optional is usable where its inner type
    /// is expected (and vice versa). Match-arm order is significant — the
    /// None/Optional arms must precede the generic Optional-unwrapping arms.
    pub fn is_compatible(&self, other: &Type) -> bool {
        match (self, other) {
            // Wildcards in either position.
            (Type::Any, _) | (_, Type::Any) => true,
            (Type::Unknown, _) | (_, Type::Unknown) => true,
            (Type::Int, Type::Int) => true,
            (Type::Float, Type::Float) => true,
            // Ints and floats are freely interchangeable.
            (Type::Int, Type::Float) | (Type::Float, Type::Int) => true,
            (Type::Str, Type::Str) => true,
            // Strings coerce to paths and vice versa.
            (Type::Str, Type::Path) | (Type::Path, Type::Str) => true,
            (Type::Path, Type::Path) => true,
            (Type::Bool, Type::Bool) => true,
            (Type::None, Type::None) => true,
            // `none` is a valid value of every optional type.
            (Type::None, Type::Optional(_)) | (Type::Optional(_), Type::None) => true,
            (Type::List(a), Type::List(b)) => a.is_compatible(b),
            (Type::Optional(a), Type::Optional(b)) => a.is_compatible(b),
            // Implicit unwrapping/wrapping of optionals; must come after the
            // None/Optional arms above.
            (Type::Optional(a), b) => a.is_compatible(b),
            (a, Type::Optional(b)) => a.is_compatible(b),
            (Type::Function(a_params, a_ret), Type::Function(b_params, b_ret)) => {
                a_params.len() == b_params.len()
                    && a_params.iter().zip(b_params.iter()).all(|(a, b)| a.is_compatible(b))
                    && a_ret.is_compatible(b_ret)
            }
            // Nominal typing: structs and enums match by name only.
            (Type::Struct(a), Type::Struct(b)) => a.name == b.name,
            (Type::Enum(a), Type::Enum(b)) => a.name == b.name,
            // A union matches if any member does, in either position.
            (Type::Union(types), other) | (other, Type::Union(types)) => {
                types.iter().any(|t| t.is_compatible(other))
            }
            _ => false,
        }
    }

    /// Returns a human-readable representation of this type.
    pub fn display(&self) -> String {
        match self {
            Type::Int => "int".to_string(),
            Type::Float => "float".to_string(),
            Type::Str => "str".to_string(),
            Type::Bool => "bool".to_string(),
            Type::Path => "path".to_string(),
            Type::None => "none".to_string(),
            Type::List(inner) => format!("[{}]", inner.display()),
            Type::Optional(inner) => format!("{}?", inner.display()),
            Type::Function(params, ret) => {
                let params_str = params.iter().map(|p| p.display()).collect::<Vec<_>>().join(", ");
                format!("fn({}) -> {}", params_str, ret.display())
            }
            Type::Struct(s) => s.name.clone(),
            Type::Enum(e) => e.name.clone(),
            Type::Union(types) => types.iter().map(|t| t.display()).collect::<Vec<_>>().join(" | "),
            Type::Any => "any".to_string(),
            Type::Unknown => "unknown".to_string(),
        }
    }
}
/// Struct type with fields and methods.
///
/// Structs are compared nominally: two `StructType`s are the same type when
/// their names match (see `Type::is_compatible`).
#[derive(Clone, Debug, PartialEq)]
pub struct StructType {
    /// Declared struct name; also the identity used for type compatibility.
    pub name: String,
    /// Field name → field type.
    pub fields: HashMap<String, Type>,
    /// Method name → signature (including the implicit `self` parameter,
    /// presumably — confirm against the checker's method-call handling).
    pub methods: HashMap<String, FunctionType>,
}
/// Enum type with named variants.
#[derive(Clone, Debug, PartialEq)]
pub struct EnumType {
    /// Enum name; two enum types are compatible when their names match.
    pub name: String,
    /// Variant name mapped to its payload types; `None` for unit variants
    /// (e.g. the built-in `Os` variants carry no payload).
    pub variants: HashMap<String, Option<Vec<Type>>>,
}
/// Function signature type.
#[derive(Clone, Debug, PartialEq)]
pub struct FunctionType {
    /// Parameter `(name, type)` pairs in declaration order.
    pub params: Vec<(String, Type)>,
    /// Declared return type.
    pub return_type: Type,
    /// Whether the function is marked async.
    pub is_async: bool,
}
/// Type environment with scoped bindings.
///
/// NOTE(review): the derived `Default` produces an env with *no* scopes and
/// no builtins, unlike `TypeEnv::new()` which seeds a global scope and the
/// built-in bindings — prefer `new()` unless the empty state is intended.
#[derive(Clone, Debug, Default)]
pub struct TypeEnv {
    /// Stack of lexical scopes; the innermost scope is last.
    scopes: Vec<HashMap<String, Type>>,
    /// Registered struct types by name.
    pub structs: HashMap<String, StructType>,
    /// Registered enum types by name.
    pub enums: HashMap<String, EnumType>,
    /// Registered free-function signatures by name.
    pub functions: HashMap<String, FunctionType>,
}
impl TypeEnv {
    /// Creates a new type environment with a single global scope and the
    /// built-in types and variables already registered.
    pub fn new() -> Self {
        let mut env = Self {
            scopes: vec![HashMap::new()],
            structs: HashMap::new(),
            enums: HashMap::new(),
            functions: HashMap::new(),
        };
        env.register_builtins();
        env
    }
    /// Registers the built-in `Os` enum and the built-in variables
    /// (`os`, `distro`, `pkg_manager`, `hostname`, `arch`).
    fn register_builtins(&mut self) {
        // Unit variants (no payload) for each supported platform.
        let os_enum = EnumType {
            name: "Os".to_string(),
            variants: ["Linux", "MacOS", "Windows"]
                .into_iter()
                .map(|v| (v.to_string(), None))
                .collect(),
        };
        // Bind `os` from the freshly built value instead of re-reading it
        // out of the map afterwards (avoids a panic-prone index lookup).
        self.define("os".to_string(), Type::Enum(os_enum.clone()));
        self.enums.insert("Os".to_string(), os_enum);
        self.define("distro".to_string(), Type::Str);
        self.define("pkg_manager".to_string(), Type::Str);
        self.define("hostname".to_string(), Type::Str);
        self.define("arch".to_string(), Type::Str);
    }
    /// Enters a new (innermost) scope.
    pub fn push_scope(&mut self) {
        self.scopes.push(HashMap::new());
    }
    /// Exits the current scope.
    ///
    /// The global scope is never removed: popping past it would silently
    /// disable `define` and drop the builtins, so unbalanced extra pops
    /// are ignored instead.
    pub fn pop_scope(&mut self) {
        if self.scopes.len() > 1 {
            self.scopes.pop();
        }
    }
    /// Defines (or shadows) a variable in the innermost scope.
    pub fn define(&mut self, name: String, ty: Type) {
        if let Some(scope) = self.scopes.last_mut() {
            scope.insert(name, ty);
        }
    }
    /// Looks up a variable by name, searching from the innermost scope
    /// outward; returns the first (most local) binding found.
    pub fn lookup(&self, name: &str) -> Option<&Type> {
        self.scopes.iter().rev().find_map(|scope| scope.get(name))
    }
    /// Registers a struct type.
    pub fn define_struct(&mut self, name: String, st: StructType) {
        self.structs.insert(name, st);
    }
    /// Registers an enum type.
    pub fn define_enum(&mut self, name: String, et: EnumType) {
        self.enums.insert(name, et);
    }
    /// Registers a function type.
    pub fn define_function(&mut self, name: String, ft: FunctionType) {
        self.functions.insert(name, ft);
    }
}