Major code clean up and overhaul

Ian Wijma 2024-04-03 23:11:53 +11:00
parent 7d7db72a7b
commit 43dfbfd016
4 changed files with 195 additions and 181 deletions

@@ -1,41 +1,215 @@
use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::{canonicalize, read_to_string};
use std::path::{Path, PathBuf};
use clap::Args;
use glob::glob;
use serde::Deserialize;
#[derive(Args, Debug)]
pub struct Arguments {
    #[arg(help = "Which task to run")]
    task_name: String,
    #[arg(long, default_value = ".", help = "Which directory to use as entry, defaults to the current directory")]
entry: String,
}
pub fn run (arguments: &Arguments) -> Result<(), String> {
    let Arguments { entry, task_name: _task_name } = arguments;
    // Resolve the entry path
    let entry_config_path: PathBuf = resolve_config_path(entry)?;
    println!("entry_config_path: {:?}", entry_config_path);
    // Discover all config paths
    let config_paths: Vec<PathBuf> = discover_config_paths(entry_config_path)?;
    println!("config_paths: {:?}", config_paths);
    // Parse config file content
    let config_files: Vec<ConfigFile> = read_config_files(config_paths)?;
    println!("config_files: {:?}", config_files);
    // Parse config files
    let configs: Vec<Config> = parse_config_files(config_files)?;
    println!("configs: {:?}", configs);
// Resolve dependencies based on the directory structure
// (In the future this will be configurable based on a dependency config field)
// let config_structure: ConfigStructure = resolve_config_structure(configs);
// Gather the tasks from the config
// let task_structure: TaskStructure = resolve_task_structure(config_structure, task_name);
// Run the commands, one by one
    // > In the future this is configurable on the task level and maybe on the config file level
// > Initially it fails the whole command if one task fails, but will also be configurable in the future
// let task_exit: TaskExit = run_task_structure(task_structure);
Ok(())
}
#[derive(Debug, Clone)]
enum TaskType {
SHELL
}
#[derive(Debug, Clone)]
#[allow(dead_code)]
struct ConfigTask {
task_type: TaskType,
content: String
}
type ConfigTasks = HashMap<String, ConfigTask>;
#[derive(Debug, Clone)]
#[allow(dead_code)]
struct Config {
name: String,
tasks: HashMap<String, ConfigTask>,
path: PathBuf,
}
fn parse_config_files(config_files: Vec<ConfigFile>) -> Result<Vec<Config>, String> {
let mut configs: Vec<Config> = vec![];
for config_file in config_files {
let config = parse_config_file(config_file)?;
configs.push(config);
}
Ok(configs)
}
fn parse_config_file(config_file: ConfigFile) -> Result<Config, String> {
let ConfigFile { name, tasks: config_file_tasks, _file_path: path, .. } = config_file;
let tasks = parse_config_tasks(config_file_tasks)?;
let config: Config = Config {name, tasks, path};
Ok(config)
}
fn parse_config_tasks(tasks: ConfigFileTasks) -> Result<ConfigTasks, String> {
let mut config_tasks: ConfigTasks = HashMap::new();
for (key, value) in tasks {
let config_task: ConfigTask = ConfigTask {
task_type: TaskType::SHELL,
content: value
};
config_tasks.insert(key, config_task);
}
Ok(config_tasks)
}
fn read_config_files(paths: Vec<PathBuf>) -> Result<Vec<ConfigFile>, String> {
let mut configs_files: Vec<ConfigFile> = vec![];
for path in paths {
let config_file = read_config_file(path)?;
configs_files.push(config_file);
}
Ok(configs_files)
}
fn discover_config_paths(path: PathBuf) -> Result<Vec<PathBuf>, String> {
let mut found_config_paths: Vec<PathBuf> = vec![path.clone()];
// Read config
let mut path_stack: Vec<PathBuf> = vec![path.clone()];
while !path_stack.is_empty() {
let ConfigFile { directories, _file_path, .. } = read_config_file(path_stack.pop().unwrap())?;
// Extract directories
let config_directory = _file_path.parent().ok_or("Failed to get parent directory")?;
for directory in directories {
let mut pattern: PathBuf = config_directory.to_path_buf();
pattern.push(&directory);
            // Append the default filename unless the pattern already points at a .yaml file
            if pattern.extension().and_then(|ext| ext.to_str()) != Some("yaml") {
pattern.push("rask.yaml");
}
// Find config files based on the pattern in the directories value
let pattern_string: &str = pattern.to_str().unwrap();
for pattern_results in glob(pattern_string).map_err(|e| format!("Failed to read glob pattern: {}", e))? {
if let Ok(found_config_path) = pattern_results {
// Only add if the path was not already processed, preventing loops.
if !found_config_paths.contains(&found_config_path) {
found_config_paths.push(found_config_path.clone());
path_stack.push(found_config_path.clone());
}
}
}
}
}
Ok(found_config_paths)
}
fn read_file_content (path: PathBuf) -> Result<String, String> {
match read_to_string(path) {
Ok(content) => Ok(content),
Err(err) => Err(format!("Failed to read file: {}", err)),
}
}
type ConfigFileTasks = HashMap<String, String>;
#[derive(Debug, Deserialize, Default)]
struct ConfigFile {
name: String,
#[serde(default)]
directories: Vec<String>,
#[serde(default)]
tasks: ConfigFileTasks,
#[serde(default)]
_file_path: PathBuf,
}
fn read_config_file(path: PathBuf) -> Result<ConfigFile, String> {
let content = read_file_content(path.clone())?;
    let mut config_file: ConfigFile = serde_yaml::from_str(&content)
        .map_err(|err| format!("Failed to parse YAML from {:?}: {}", path, err))?;
config_file._file_path = path.clone();
Ok(config_file)
}
fn resolve_config_path<P: AsRef<Path> + Debug + Clone + Copy>(path: P) -> Result<PathBuf, String> {
let full_path = match canonicalize(path) {
Ok(full_path) => full_path,
        Err(_) => return Err(format!("Target does not exist: {:?}", path))
};
if full_path.is_dir() {
let config_file = find_config_file(full_path)?;
return Ok(config_file)
}
Ok(full_path)
}
const CONFIG_FILENAMES: [&str; 1] = ["rask.yaml"];
fn find_config_file(directory_path: PathBuf) -> Result<PathBuf, String> {
if !directory_path.is_dir() {
return Err(format!("\"{:?}\" is not a directory", directory_path))
}
for filename in CONFIG_FILENAMES {
let mut possible_config_file = directory_path.clone();
possible_config_file.push(filename);
match possible_config_file.exists() {
true => return Ok(possible_config_file),
false => {}
}
}
Err(format!("Unable to find a config file (\"{:?}\") in {:?}", CONFIG_FILENAMES, directory_path))
}
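For reference, the ConfigFile struct above is driven entirely by serde, so a rask.yaml only needs a name plus an optional directories list of globs and an optional tasks map. A minimal, self-contained sketch of that shape follows; the project name, glob, and task commands are invented for illustration, and the internal _file_path field is omitted because read_config_file fills it in from the path after parsing.

// Illustrative sketch only: mirrors the ConfigFile schema from this commit
// (minus _file_path) and parses an invented rask.yaml-style document.
use std::collections::HashMap;
use serde::Deserialize;

#[derive(Debug, Deserialize, Default)]
struct ConfigFile {
    name: String,
    #[serde(default)]
    directories: Vec<String>,
    #[serde(default)]
    tasks: HashMap<String, String>,
}

fn main() {
    let yaml = r#"
name: example-workspace
directories:
  - packages/*
tasks:
  build: cargo build
  test: cargo test
"#;
    // Parse the sample document and print the resulting struct.
    let config: ConfigFile = serde_yaml::from_str(yaml).expect("Failed to parse YAML");
    println!("{:?}", config);
}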

@@ -1,128 +0,0 @@
use std::path::PathBuf;
use std::fs::File;
use std::io::BufReader;
use serde::Deserialize;
use std::collections::HashMap;
pub fn validate_config(config: Config) -> Result<bool, String> {
let mut names: Vec<String> = vec![];
for config in config.iter() {
let Config { name, path, .. } = config;
if names.contains(&name) {
return Err(format!("Duplicate config name {} found: {:?}", name, path));
}
names.push(name);
}
Ok(true)
}
#[derive(Debug, Deserialize, Default)]
struct ConfigFile {
name: String,
#[serde(default)]
directories: Vec<String>,
#[serde(default)]
tasks: HashMap<String, String>,
}
fn read_config_file(path_buf: &PathBuf) -> Result<ConfigFile, String> {
let file = File::open(path_buf).expect("Failed to read file");
let reader = BufReader::new(file);
let config: ConfigFile = serde_yaml::from_reader(reader).expect("Failed to parse YAML");
Ok(config)
}
#[derive(Debug, Clone)]
pub struct ConfigTask {
tag: String,
command: String
}
#[derive(Debug, Clone)]
pub struct Config {
name: String,
tasks: Vec<ConfigTask>,
path: PathBuf,
sub_configs: Vec<Config>,
}
impl Config {
pub fn iter(self) -> ConfigIterator {
return ConfigIterator::new(self.clone());
}
pub fn get_task(&self, task_name: &String) -> Option<String> {
for task in self.clone().tasks {
if task.tag == *task_name {
return Some(task.command)
}
}
None
}
}
pub struct ConfigIterator {
stack: Vec<Config>
}
impl ConfigIterator {
pub fn new(config: Config) -> ConfigIterator {
let mut stack = vec![config];
ConfigIterator{ stack }
}
}
impl Iterator for ConfigIterator {
type Item = Config;
fn next(&mut self) -> Option<Self::Item> {
let next_config = self.stack.pop()?;
self.stack.extend(next_config.sub_configs.iter().rev().map(|sub_config| sub_config.clone()));
Some(next_config)
}
}
pub fn parse_config(path: &PathBuf) -> Result<Config, String> {
let config_file = read_config_file(path)?;
let name = config_file.name;
let tasks = config_file.tasks
.iter()
.map(|(tag, command)| ConfigTask{tag: tag.clone(), command: command.clone()})
.collect();
let mut sub_configs: Vec<Config> = Vec::new();
let parent_dir = path.parent().ok_or("Failed to get parent directory")?;
for directory in config_file.directories {
let mut pattern: PathBuf = parent_dir.to_path_buf();
pattern.push(&directory);
if !pattern.ends_with(".yaml") {
pattern.push("rask.yaml");
}
for entry in glob::glob(pattern.to_str().unwrap()).map_err(|e| format!("Failed to read glob pattern: {}", e))? {
if let Ok(config_path) = entry {
let sub_config = parse_config(&config_path)?;
sub_configs.push(sub_config);
}
}
}
let config = Config{name, tasks, sub_configs, path: path.clone()};
Ok(config)
}
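For contrast with the discover_config_paths worklist in the first file of this commit, the removed ConfigIterator above walks the config tree depth-first and pre-order: it pops a node from a stack, yields it, and pushes its sub_configs in reverse so they come back out in their declared order. Below is a stripped-down sketch of that traversal, using a hypothetical Node type rather than the project's Config.

// Stand-alone illustration of the traversal the removed ConfigIterator performed:
// pop a node, record it, then push its children in reverse (pre-order, depth-first).
struct Node {
    name: String,
    children: Vec<Node>,
}

fn walk(root: Node) -> Vec<String> {
    let mut order = Vec::new();
    let mut stack = vec![root];
    while let Some(node) = stack.pop() {
        order.push(node.name);
        stack.extend(node.children.into_iter().rev());
    }
    order
}

fn main() {
    let tree = Node {
        name: "root".into(),
        children: vec![
            Node { name: "a".into(), children: vec![Node { name: "a1".into(), children: vec![] }] },
            Node { name: "b".into(), children: vec![] },
        ],
    };
    // Parents come before children, siblings keep their declared order:
    // ["root", "a", "a1", "b"]
    println!("{:?}", walk(tree));
}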

@@ -1,23 +0,0 @@
use std::path::{Path, PathBuf};
use std::fs::canonicalize;
const DEFAULT_FILENAME: &str = "rask.yaml";
pub fn resolve_configuration_file(target: &String) -> Result<PathBuf, String> {
let target_path = Path::new(target);
let mut target = match canonicalize(target_path) {
Ok(target) => target,
Err(_) => return Err(format!("Target does not exists: {:?}", target))
};
if target.is_dir() {
target.push(DEFAULT_FILENAME);
}
if !target.exists() {
return Err(format!("Target does not exists: {:?}", target))
}
Ok(target)
}

@@ -1,9 +0,0 @@
use std::process::Command;
pub fn run_task(task: &String) -> Result<(), String> {
let mut command = Command::new(task);
match command.spawn().unwrap().wait() {
Ok(_) => Ok(()),
Err(_) => Err(format!("Task failed: {}", task))
}
}
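The removed run_task above hands the entire task string to Command::new, so a task such as "cargo build" is looked up as a single program name rather than a program plus arguments, and the unwrap on spawn panics instead of returning the Err the signature promises. A hedged sketch of what the new ConfigTask with TaskType::SHELL could map to, running the task content through the system shell; the function name and the "sh -c" choice are assumptions for illustration, not code from this commit.

use std::process::Command;

// Illustrative sketch: execute a task's content via the shell so multi-word
// commands work. "sh -c" assumes a Unix-like shell; Windows would need "cmd /C".
fn run_shell_task(content: &str) -> Result<(), String> {
    let status = Command::new("sh")
        .arg("-c")
        .arg(content)
        .status()
        .map_err(|err| format!("Failed to spawn task: {}", err))?;
    if status.success() {
        Ok(())
    } else {
        Err(format!("Task failed ({}): {}", status, content))
    }
}

fn main() {
    // Made-up command purely for demonstration.
    match run_shell_task("echo building") {
        Ok(_) => println!("task finished"),
        Err(err) => eprintln!("{}", err),
    }
}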