Added init command

General code cleanup
Fixed automatic task discovery not picking up the tasks defined in the rask config file.
Ian Wijma 2024-04-06 20:14:17 +11:00
parent d8addbdb7f
commit 4fb6c7b497
9 changed files with 118 additions and 32 deletions

View File

@@ -2,3 +2,4 @@ name: nested/folder
 tasks:
   dev: echo 'Hello from nested/folder?'
+  meme: echo 'Meme from nested/folder?'

examples/init/.gitignore (new file)
View File

@@ -0,0 +1,2 @@
*
!.gitignore

src/commands/init.rs (new file)
View File

@@ -0,0 +1,51 @@
use clap::Args;
use crate::utils::file::{ConfigFile, parse_path_string, write_config_file};

#[derive(Args, Debug)]
pub struct Arguments {
    #[arg(long, default_value = ".", help = "Which directory to use as entry, defaults to the current directory")]
    entry: String,
    #[arg(help = "The name of the config file, defaults to the directory name", default_value = "")]
    name: String,
}

pub fn execute(arguments: &Arguments) -> Result<(), String> {
    let Arguments { entry, name } = arguments;

    let mut path = parse_path_string(entry)?;
    if path.is_dir() {
        path.push("rask.yaml")
    }

    if path.exists() {
        return Err(format!("Rask already initialised at {:?}", path));
    }

    let mut config_name = name.clone(); // Use clone to avoid modifying the original input
    if config_name.is_empty() {
        config_name = path.parent()
            .unwrap()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap()
            .to_string();
    }

    let config_file: ConfigFile = ConfigFile {
        name: config_name,
        task_engine: Default::default(),
        directories: vec![],
        tasks: Default::default(),
        __file_path: Default::default(),
        __dir_path: Default::default(),
    };

    write_config_file(path.clone(), config_file)?;

    // NOTE: We could improve the init command by adding a reverse search for a parent rask file
    println!("Rask initialised: {:?}", path);

    Ok(())
}
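
When no name argument is given, the config name falls back to the name of the directory that will hold the generated rask.yaml. A standalone sketch of that fallback, using an illustrative path rather than the crate's own types:

use std::path::Path;

fn fallback_name(config_path: &Path) -> Option<String> {
    config_path
        .parent()                         // e.g. /home/user/my-project
        .and_then(|dir| dir.file_name())  // e.g. "my-project"
        .and_then(|name| name.to_str())
        .map(str::to_string)
}

fn main() {
    let path = Path::new("/home/user/my-project/rask.yaml");
    assert_eq!(fallback_name(path), Some("my-project".to_string()));
}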

View File

@@ -40,8 +40,8 @@ fn get_config_tasks(configs: &Vec<Config>) -> Result<Vec<String>, String> {
     let mut tasks: Vec<String> = vec![];
     for config in configs {
-        for configTask in &config.tasks {
-            let ConfigTask { key, .. } = configTask;
+        for config_task in &config.tasks {
+            let ConfigTask { key, .. } = config_task;
             if !tasks.contains(&key) {
                 tasks.push(key.clone());
             }

View File

@@ -1,2 +1,3 @@
 pub mod run;
 pub mod list;
+pub mod init;

View File

@@ -113,7 +113,7 @@ fn execute_task(task: &Task) -> Result<JoinHandle<bool>, String> {
            .arg(command)
            .current_dir(directory)
            .status()
-            .expect("Failed to execute command");
+            .map_err(|err| err.to_string());
        status.success()
    });
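
The hunk above stops panicking when the command cannot be started and maps the error to a String instead. For reference, a minimal standalone version of that pattern, folding both the spawn error and a non-zero exit into a single bool; the `sh -c` invocation and the helper name are assumptions for this sketch, not necessarily what run.rs uses:

use std::process::Command;

// Returns true only when the command spawns and exits successfully.
fn run_ok(command: &str, directory: &str) -> bool {
    Command::new("sh")
        .arg("-c")
        .arg(command)
        .current_dir(directory)
        .status()
        .map_err(|err| err.to_string())
        .map(|status| status.success())
        .unwrap_or(false)
}

fn main() {
    assert!(run_ok("true", "."));
    assert!(!run_ok("false", "."));
}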

View File

@@ -2,6 +2,7 @@ use std::process::exit;
 use clap::{Parser, Subcommand};
 use commands::run;
 use commands::list;
+use commands::init;
 mod commands;
 mod utils;
@@ -10,6 +11,7 @@ mod utils;
 enum Command {
     Run(run::Arguments),
     List(list::Arguments),
+    Init(init::Arguments),
 }

 #[derive(Parser, Debug)]
@@ -25,6 +27,7 @@ fn main() {
     let result = match &arguments.command {
         Command::Run(arguments) => { run::execute(arguments) },
         Command::List(arguments) => { list::execute(arguments) },
+        Command::Init(arguments) => { init::execute(arguments) },
     };

     match result {

View File

@@ -1,6 +1,5 @@
 use std::path::{Path, PathBuf};
 use std::fmt::Debug;
-use std::fs::canonicalize;
 use std::collections::HashMap;
 use globset::{Glob, GlobSetBuilder};
 use serde::Deserialize;
@@ -172,7 +171,7 @@ fn parse_config_file(config_file: ConfigFile) -> Result<Config, String> {
         TaskEngine::NPM => parse_package_json_tasks(&dir_path, TaskType::NPM)?,
         TaskEngine::YARN => parse_package_json_tasks(&dir_path, TaskType::YARN)?,
         TaskEngine::NONE => parse_config_tasks(config_file_tasks)?,
-        TaskEngine::AUTO => parse_discovered_tasks(&dir_path)?,
+        TaskEngine::AUTO => parse_discovered_tasks(&dir_path, config_file_tasks)?,
     };

     let config: Config = Config { name, tasks, file_path, dir_path, directories };
@@ -181,12 +180,11 @@ fn parse_config_file(config_file: ConfigFile) -> Result<Config, String> {
 }

 const PACKAGE_JSON_FILE: &str = "package.json";
-const NPM_LOCK_FILE: &str = "package.lock";
 const YARN_LOCK_FILE: &str = "yarn.lock";
 const COMPOSER_JSON_FILE: &str = "composer.json";

-fn parse_discovered_tasks(dir_path: &PathBuf) -> Result<ConfigTasks, String> {
-    let mut config_tasks: ConfigTasks = vec![];
+fn parse_discovered_tasks(dir_path: &PathBuf, config_file_tasks: ConfigFileTasks) -> Result<ConfigTasks, String> {
+    let mut config_tasks: ConfigTasks = parse_config_tasks(config_file_tasks)?;

     // Gathering facts
     let has_composer_json = dir_path.join(COMPOSER_JSON_FILE).exists();
@@ -200,7 +198,7 @@ fn parse_discovered_tasks(dir_path: &PathBuf) -> Result<ConfigTasks, String> {
     }

     if has_package_json {
-        let mut package_config_tasks: ConfigTasks;
+        let package_config_tasks: ConfigTasks;
         if has_yarn_lock {
             package_config_tasks = parse_package_json_tasks(dir_path, TaskType::YARN)?;
@@ -331,10 +329,7 @@ fn get_config_glob_pattern(root_path: &Path, glob_pattern: &String) -> PathBuf {
 }

 pub fn resolve_config_path<P: AsRef<Path> + Debug + Clone + Copy>(path: P) -> Result<PathBuf, String> {
-    let full_path = match canonicalize(path) {
-        Ok(full_path) => full_path,
-        Err(_) => return Err(format!("Target does not exists: {:?}", path.clone()))
-    };
+    let full_path = file::parse_path_string(path)?;

     if full_path.is_dir() {
         let config_file = find_config_file(full_path)?;
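
Among the hunks above is the fix named in the commit message: with task_engine left on auto, parse_discovered_tasks is now seeded with the tasks declared in the rask config instead of an empty list, so explicit tasks are no longer dropped by discovery. A simplified, self-contained sketch of that merge, using stand-in types and helper names rather than the crate's own:

use std::collections::HashMap;

type ConfigFileTasks = HashMap<String, String>;

#[derive(Debug)]
struct ConfigTask {
    key: String,
    command: String,
}

// Stand-in for the crate's parse_config_tasks: lift the yaml task map into tasks.
fn parse_config_tasks(file_tasks: ConfigFileTasks) -> Vec<ConfigTask> {
    file_tasks
        .into_iter()
        .map(|(key, command)| ConfigTask { key, command })
        .collect()
}

// Discovery seeded with the config-file tasks; engine-discovered tasks are appended.
fn discover(file_tasks: ConfigFileTasks, discovered: Vec<ConfigTask>) -> Vec<ConfigTask> {
    let mut tasks = parse_config_tasks(file_tasks); // previously started from vec![]
    tasks.extend(discovered);
    tasks
}

fn main() {
    let mut file_tasks = ConfigFileTasks::new();
    file_tasks.insert("dev".to_string(), "echo dev".to_string());
    let discovered = vec![ConfigTask { key: "build".to_string(), command: "npm run build".to_string() }];
    assert_eq!(discover(file_tasks, discovered).len(), 2);
}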

View File

@@ -1,7 +1,8 @@
-use std::path::PathBuf;
-use std::fs::read_to_string;
+use std::path::{Path, PathBuf};
+use std::fs::{canonicalize, read_to_string, write};
 use std::collections::HashMap;
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
+use std::fmt::Debug;

 pub fn read_file_content (path: PathBuf) -> Result<String, String> {
     match read_to_string(path) {
@@ -10,10 +11,16 @@ pub fn read_file_content (path: PathBuf) -> Result<String, String> {
     }
 }

+pub fn write_file_content(file_path: &PathBuf, content: &str) -> Result<(), String> {
+    write(file_path, content).map_err(|err| format!("Failed to write to file: {}", err))?;
+    Ok(())
+}
+
 pub fn read_json_file<T: for<'a> Deserialize<'a>>(file_path: &PathBuf) -> Result<T, String> {
     let content = read_file_content(file_path.clone())?;
-    let file_content: T = serde_json::from_str::<T>(&content).expect(format!("Failed to read the file: \"{:?}\"", file_path).as_str());
+    let file_content: T = serde_json::from_str::<T>(&content).map_err(|err| err.to_string())?;
     Ok(file_content)
 }
@@ -21,44 +28,57 @@ pub fn read_json_file<T: for<'a> Deserialize<'a>>(file_path: &PathBuf) -> Result
 fn read_yaml_file<T: for<'a> Deserialize<'a>>(file_path: &PathBuf) -> Result<T, String> {
     let content = read_file_content(file_path.clone())?;
-    let file_content: T = serde_yaml::from_str::<T>(&content).expect(format!("Failed to read the file: \"{:?}\"", file_path).as_str());
+    let file_content: T = serde_yaml::from_str::<T>(&content).map_err(|err| err.to_string())?;
     Ok(file_content)
 }

+pub fn write_yaml_file<T: Serialize + Debug>(file_path: &PathBuf, data: &T) -> Result<(), String> {
+    let yaml_content = serde_yaml::to_string(data).map_err(|err| err.to_string())?;
+    write_file_content(file_path, &yaml_content)
+}
+
-#[derive(Debug, Clone, Default, Deserialize)]
+#[derive(Debug, Clone, Default, Deserialize, Serialize)]
+#[serde(rename_all = "lowercase")]
 pub enum TaskEngine {
-    #[serde(rename = "composer")]
     COMPOSER,
-    #[serde(rename = "npm")]
     NPM,
-    #[serde(rename = "yarn")]
     YARN,
-    #[serde(rename = "none")]
     NONE,
-    #[serde(rename = "auto")]
     #[default]
     AUTO,
 }

 pub type ConfigFileTasks = HashMap<String, String>;

-#[derive(Debug, Deserialize, Clone, Default)]
+#[derive(Debug, Clone, Default, Deserialize, Serialize)]
 pub struct ConfigFile {
     pub(crate) name: String,
-    #[serde(default)]
+    #[serde(default, skip_serializing_if = "is_auto_task_engine")]
     pub(crate) task_engine: TaskEngine,
-    #[serde(default)]
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
     pub(crate) directories: Vec<String>,
-    #[serde(default)]
+    #[serde(default, skip_serializing_if = "ConfigFileTasks::is_empty")]
     pub(crate) tasks: ConfigFileTasks,

     // The following fields are not part of the yaml file.
-    #[serde(default)]
+    #[serde(default, skip_serializing_if = "skip_path")]
     pub(crate) __file_path: PathBuf,
-    #[serde(default)]
+    #[serde(default, skip_serializing_if = "skip_path")]
     pub(crate) __dir_path: PathBuf,
 }

+fn is_auto_task_engine(value: &TaskEngine) -> bool {
+    match value {
+        TaskEngine::AUTO => true,
+        _ => false,
+    }
+}
+
+fn skip_path(path: &PathBuf) -> bool {
+    path.to_str().map_or(true, |s| s.is_empty())
+}
+
 pub fn read_config_file(config_file_path: PathBuf) -> Result<ConfigFile, String> {
     let mut config_file = read_yaml_file::<ConfigFile>(&config_file_path)?;
@@ -68,3 +88,16 @@ pub fn read_config_file(config_file_path: PathBuf) -> Result<ConfigFile, String>
     Ok(config_file)
 }
+
+pub fn write_config_file(config_file_path: PathBuf, config_file: ConfigFile) -> Result<(), String> {
+    write_yaml_file::<ConfigFile>(&config_file_path, &config_file)
+}
+
+pub fn parse_path_string<P: AsRef<Path> + Debug + Clone + Copy>(path: P) -> Result<PathBuf, String> {
+    let full_path = match canonicalize(path) {
+        Ok(full_path) => full_path,
+        Err(_) => return Err(format!("Target does not exists: {:?}", path.clone()))
+    };
+
+    Ok(full_path)
+}
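
Taken together with write_yaml_file and the new skip_serializing_if attributes, write_config_file can emit a minimal file for a freshly initialised project, since every defaulted field is omitted. A self-contained sketch of that behaviour with simplified stand-in types, not the crate's actual ConfigFile:

use serde::Serialize;
use std::collections::HashMap;

#[derive(Serialize, Default)]
struct MiniConfig {
    name: String,
    #[serde(skip_serializing_if = "HashMap::is_empty")]
    tasks: HashMap<String, String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    directories: Vec<String>,
}

fn main() {
    let config = MiniConfig {
        name: "my-project".to_string(),
        ..Default::default()
    };
    // With the empty fields skipped, this prints just: name: my-project
    println!("{}", serde_yaml::to_string(&config).unwrap());
}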