Compare commits

..

2 Commits

4 changed files with 153 additions and 22 deletions

View File

@@ -1,4 +1,5 @@
{
"license": "UNLICENSED",
"scripts": {
"dev": "echo \"Hello from npm\""
}

View File

@@ -1,4 +1,5 @@
{
"license": "UNLICENSED",
"scripts": {
"dev": "echo \"Hello from pnpm\""
}

View File

@@ -1,4 +1,5 @@
{
"license": "UNLICENSED",
"scripts": {
"dev": "echo \"Hello from yarn\""
}

View File

@@ -2,6 +2,10 @@ use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::{canonicalize, read_to_string};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::thread;
use std::thread::JoinHandle;
use std::time::Instant;
use clap::Args;
use glob::glob;
use globset::{Glob, GlobSetBuilder};
@@ -18,6 +22,9 @@ pub struct Arguments {
pub fn run (arguments: &Arguments) -> Result<(), String> {
let Arguments { entry, task_name } = arguments;
// Start the timer
let start_time = Instant::now();
// Resolve the entry path
let entry_config_path: PathBuf = resolve_config_path(entry)?;
@@ -35,24 +42,143 @@ pub fn run (arguments: &Arguments) -> Result<(), String> {
let config_structure: ConfigStructure = resolve_config_structure(&entry_config_path, configs)?;
// Gather the tasks from the config
let task_structure: TaskStructure = resolve_task_structure(config_structure, task_name);
eprintln!("task_structure = {:?}", task_structure);
let ordered_tasks: OrderedTasks = resolve_task_order(config_structure, task_name)?;
// Run the commands, one by one
// > In the future this will be configurable at the task level and maybe at the config file level
// > Initially a single failing task fails the whole run, but this will also be configurable in the future
// let task_exit: TaskExit = run_task_structure(task_structure);
let task_exit: TaskExit = run_task_structure(&ordered_tasks)?;
let task_amount = ordered_tasks.len();
let execution_time = start_time.elapsed().as_secs_f32();
let formatted_execution_time = (execution_time * 100.0).round() / 100.0;
match task_exit {
TaskExit::SUCCESS => println!("Successfully executed {} tasks within {} seconds", task_amount, formatted_execution_time),
TaskExit::FAILURE => println!("Failed after executing {} tasks within {} seconds", task_amount, formatted_execution_time),
}
Ok(())
}
#[derive(Debug, Clone)]
struct TaskStructure {
fn run_task_structure(ordered_tasks: &OrderedTasks) -> Result<TaskExit, String> {
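// Run the waves from the highest order down to 0, so tasks from the deepest configs finish before the entry config's task; the first failing wave stops the run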
let highest_order = find_highest_order(ordered_tasks)?;
for order in (0..=highest_order).rev() {
match run_task_order(ordered_tasks, order) {
Ok(_) => {}
Err(err) => {
println!("{}", err);
return Ok(TaskExit::FAILURE);
}
}
}
Ok(TaskExit::SUCCESS)
}
fn resolve_task_structure(config_structure: ConfigStructure, task_name: &String) -> TaskStructure {
todo!()
fn run_task_order(ordered_tasks: &OrderedTasks, order: u64) -> Result<(), String> {
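// Collect every task assigned to this order so the whole wave can be spawned together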
let mut tasks: Vec<&Task> = vec![];
for ordered_task in ordered_tasks {
let OrderedTask { task, order: task_order } = ordered_task;
if *task_order == order {
tasks.push(task);
}
}
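// Spawn one thread per task in this wave, then join them all; a failed command or a panicked thread fails the wave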
let mut task_threads: Vec<JoinHandle<bool>> = vec![];
for task in tasks {
let task_thread = execute_task(task)?;
task_threads.push(task_thread);
}
for task_thread in task_threads {
if let Ok(success) = task_thread.join() {
if !success {
return Err("Command execution failed.".to_string());
}
} else {
return Err("Thread panicked.".to_string());
}
}
Ok(())
}
fn execute_task(task: &Task) -> Result<JoinHandle<bool>, String> {
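// Clone the task so the spawned thread owns its data, then run the command via `sh -c` in the task's directory and report whether it succeeded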
let task_thread = thread::spawn({
let task = task.clone();
move || {
let status = Command::new("sh")
.arg("-c")
.arg(&task.command)
.current_dir(&task.directory)
.status()
.expect("Failed to execute command");
status.success()
}
});
Ok(task_thread)
}
fn find_highest_order(ordered_tasks: &OrderedTasks) -> Result<u64, String> {
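// Scan all ordered tasks for the largest order value (0 for an empty list)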
let mut highest_order: u64 = 0u64;
for ordered_task in ordered_tasks {
if ordered_task.order > highest_order {
highest_order = ordered_task.order;
}
}
Ok(highest_order)
}
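// Outcome of a whole run: SUCCESS when every wave completed, FAILURE as soon as one wave fails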
#[derive(Debug, Clone)]
enum TaskExit {
SUCCESS,
FAILURE
}
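// A runnable unit: the shell command to execute and the directory to execute it in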
#[derive(Debug, Clone)]
struct Task {
command: String,
directory: PathBuf,
}
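// A task tagged with the depth (order) of the config it came from; higher orders run first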
#[derive(Debug, Clone)]
struct OrderedTask {
task: Task,
order: u64
}
type OrderedTasks = Vec<OrderedTask>;
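// Flatten the config tree into a list of tasks tagged with their depth in the tree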
fn resolve_task_order(config_structure: ConfigStructure, task_name: &String) -> Result<OrderedTasks, String> {
let mut ordered_tasks: OrderedTasks = vec![];
order_tasks(&mut ordered_tasks, config_structure, task_name, 0);
Ok(ordered_tasks)
}
fn order_tasks(ordered_tasks: &mut OrderedTasks, config_structure: ConfigStructure, task_name: &String, index: u64) {
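// Record this config's matching task (if any) at the current depth, then recurse one level deeper for each child config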
let ConfigStructure { config, children } = config_structure;
let Config { tasks, dir_path, .. } = config;
match tasks.get(task_name) {
None => {}
Some(config_task) => ordered_tasks.push(OrderedTask {
task: Task {
command: config_task.content.clone(),
directory: dir_path,
},
order: index
})
}
for child in children {
order_tasks(ordered_tasks, child, task_name, index+1);
}
}
#[derive(Debug, Clone)]
@@ -65,7 +191,7 @@ fn resolve_config_structure(entry_config_path: &PathBuf, configs: Vec<Config>) -
let mut path_map: HashMap<PathBuf, Config> = HashMap::new();
for config in configs {
path_map.insert(config.clone().path, config);
path_map.insert(config.clone().file_path, config);
}
let config_structure: ConfigStructure = construct_config_structure(entry_config_path, &path_map)?;
@@ -147,7 +273,8 @@ struct Config {
name: String,
task_engine: TaskEngine,
tasks: HashMap<String, ConfigTask>,
path: PathBuf,
file_path: PathBuf,
dir_path: PathBuf,
directories: ConfigDirectories,
}
@@ -163,17 +290,17 @@ fn parse_config_files(config_files: Vec<ConfigFile>) -> Result<Vec<Config>, Stri
}
fn parse_config_file(config_file: ConfigFile) -> Result<Config, String> {
let ConfigFile { name, tasks: config_file_tasks, _file_path: file_path, _dir_path: dir_path, directories, task_engine } = config_file;
let ConfigFile { name, tasks: config_file_tasks, __file_path: file_path, __dir_path: dir_path, directories, task_engine } = config_file;
let tasks: ConfigTasks = match task_engine {
TaskEngine::COMPOSER => parse_composer_tasks(dir_path)?,
TaskEngine::NPM => parse_node_tasks(dir_path, "npm")?,
TaskEngine::YARN => parse_node_tasks(dir_path, "yarn")?,
TaskEngine::PNPM => parse_node_tasks(dir_path, "pnmp")?,
TaskEngine::COMPOSER => parse_composer_tasks(&dir_path)?,
TaskEngine::NPM => parse_node_tasks(&dir_path, "npm")?,
TaskEngine::YARN => parse_node_tasks(&dir_path, "yarn")?,
TaskEngine::PNPM => parse_node_tasks(&dir_path, "pnpm")?,
TaskEngine::NONE => parse_config_tasks(config_file_tasks)?,
};
let config: Config = Config { name, tasks, path: file_path, directories, task_engine };
let config: Config = Config { name, tasks, file_path, dir_path, directories, task_engine };
Ok(config)
}
@@ -184,7 +311,7 @@ struct PackageJsonFile {
scripts: HashMap<String, String>,
}
fn parse_node_tasks(dir_path: PathBuf, prefix: &str) -> Result<ConfigTasks, String> {
fn parse_node_tasks(dir_path: &PathBuf, prefix: &str) -> Result<ConfigTasks, String> {
let mut file_path = dir_path.clone();
file_path.push("package.json");
let content = read_file_content(file_path)?;
@@ -215,7 +342,7 @@ struct ComposerJsonFile {
scripts: HashMap<String, ComposerJsonScriptValue>,
}
fn parse_composer_tasks(dir_path: PathBuf) -> Result<ConfigTasks, String> {
fn parse_composer_tasks(dir_path: &PathBuf) -> Result<ConfigTasks, String> {
let mut file_path = dir_path.clone();
file_path.push("composer.json");
let content = read_file_content(file_path)?;
@@ -265,7 +392,7 @@ fn discover_config_paths(path: &PathBuf) -> Result<Vec<PathBuf>, String> {
// Read config
let mut path_stack: Vec<PathBuf> = vec![path.clone()];
while !path_stack.is_empty() {
let ConfigFile { directories, _file_path, .. } = read_config_file(path_stack.pop().unwrap())?;
let ConfigFile { directories, __file_path: _file_path, .. } = read_config_file(path_stack.pop().unwrap())?;
// Extract directories
let config_directory = _file_path.parent().ok_or("Failed to get parent directory")?;
@@ -318,10 +445,11 @@ struct ConfigFile {
directories: ConfigDirectories,
#[serde(default)]
tasks: ConfigFileTasks,
// The following fields are not part of the yaml file.
#[serde(default)]
_file_path: PathBuf,
__file_path: PathBuf,
#[serde(default)]
_dir_path: PathBuf,
__dir_path: PathBuf,
}
fn read_config_file(path: PathBuf) -> Result<ConfigFile, String> {
@@ -329,8 +457,8 @@ fn read_config_file(path: PathBuf) -> Result<ConfigFile, String> {
let mut config_file: ConfigFile = serde_yaml::from_str(&content).expect(format!("Failed to parse YAML from \"{:?}\"", path).as_str());
config_file._file_path = path.clone();
config_file._dir_path = path.parent().unwrap().to_path_buf();
config_file.__file_path = path.clone();
config_file.__dir_path = path.parent().unwrap().to_path_buf();
Ok(config_file)
}