Added npm, yarn, pnpm, and composer task_engines.

Added ConfigTask parsers for the newly added task_engines.
Added examples for the new task_engines.
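
In short: when a rask.yaml sets task_engine, its tasks are no longer read from the tasks block but from the sibling package.json or composer.json, and every script is exposed as a shell task that delegates back to the engine. A minimal standalone sketch of that mapping (simplified names, not the ConfigTask types from this diff):

use std::collections::HashMap;

// Simplified stand-in for the ConfigTasks built by parse_node_tasks/parse_composer_tasks.
fn engine_tasks(engine: &str, scripts: &HashMap<String, String>) -> HashMap<String, String> {
    scripts
        .keys()
        .map(|name| (name.clone(), format!("{} run {}", engine, name)))
        .collect()
}

fn main() {
    let mut scripts = HashMap::new();
    scripts.insert("dev".to_string(), "echo \"Hello from npm\"".to_string());

    // With `task_engine: npm`, the "dev" script becomes the shell task `npm run dev`.
    let tasks = engine_tasks("npm", &scripts);
    assert_eq!(tasks["dev"], "npm run dev");
}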
Ian Wijma 2024-04-05 00:03:11 +11:00
parent aba453b571
commit 12a243796c
12 changed files with 277 additions and 29 deletions

Cargo.lock generated
View File

@ -2,6 +2,15 @@
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
dependencies = [
"memchr",
]
[[package]]
name = "anstream"
version = "0.6.13"
@ -56,6 +65,16 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
[[package]]
name = "bstr"
version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706"
dependencies = [
"memchr",
"serde",
]
[[package]]
name = "clap"
version = "4.5.4"
@ -127,6 +146,19 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "globset"
version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
dependencies = [
"aho-corasick",
"bstr",
"log",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "hashbrown"
version = "0.14.3"
@ -167,6 +199,18 @@ version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
[[package]]
name = "log"
version = "0.4.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
[[package]]
name = "memchr"
version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
[[package]]
name = "proc-macro2"
version = "1.0.79"
@ -191,10 +235,29 @@ version = "0.1.0"
dependencies = [
"clap",
"glob",
"globset",
"serde",
"serde_json",
"serde_yaml",
]
[[package]]
name = "regex-automata"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
[[package]]
name = "rustix"
version = "0.38.32"
@ -234,6 +297,17 @@ dependencies = [
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.115"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "serde_yaml"
version = "0.9.34+deprecated"

View File

@ -8,7 +8,9 @@ edition = "2021"
[dependencies]
clap = { version = "4.4.8", features = ["derive", "unicode", "wrap_help"] }
glob = "0.3.1"
globset = "0.4.14"
serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.115"
serde_yaml = "0.9.34"
[profile.dev]
@ -18,4 +20,4 @@ opt-level = 0
strip = true # Automatically strip symbols from the binary.
opt-level = "z" # Optimize for size.
lto = true
codegen-units = 1
codegen-units = 1

View File

@ -6,6 +6,7 @@ directories:
- hello
- project-*
- nested
- task_engine/**
tasks:
dev: echo 'Hello from main!'
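
The new task_engine/** entry in directories is resolved against the config file's directory and, since it does not name a .yaml file itself, has rask.yaml appended before globbing; this mirrors the get_config_glob_pattern helper further down in this commit. A standalone sketch of that expansion (illustrative paths):

use std::path::{Path, PathBuf};

// Mirrors get_config_glob_pattern from this commit: resolve a `directories`
// entry against the config's directory and point the glob at rask.yaml files.
fn config_glob_pattern(root: &Path, entry: &str) -> PathBuf {
    let mut pattern = root.to_path_buf();
    pattern.push(entry);
    if !pattern.ends_with(".yaml") {
        pattern.push("rask.yaml");
    }
    pattern
}

fn main() {
    let pattern = config_glob_pattern(Path::new("/repo"), "task_engine/**");
    // The glob crate then matches nested configs such as /repo/task_engine/npm/rask.yaml.
    assert_eq!(pattern, PathBuf::from("/repo/task_engine/**/rask.yaml"));
}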

View File

@ -0,0 +1,10 @@
{
"scripts": {
"dev": "echo \"Hello from composer\"",
"test": "echo \"Testing from composer\"",
"multi": [
"@dev",
"@test"
]
}
}
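
Note the multi entry above: composer scripts can be a single command or a list of commands/references, which is why the parser introduced below models the value as an untagged enum. A trimmed-down sketch of that deserialization (type names shortened from ComposerJsonScriptValue/ComposerJsonFile):

use std::collections::HashMap;
use serde::Deserialize;

// Same shape as ComposerJsonScriptValue/ComposerJsonFile in this commit.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ScriptValue {
    Single(String),
    Multiple(Vec<String>),
}

#[derive(Debug, Deserialize)]
struct ComposerJson {
    #[serde(default)]
    scripts: HashMap<String, ScriptValue>,
}

fn main() {
    let json = r#"{ "scripts": { "dev": "echo hi", "multi": ["@dev", "@test"] } }"#;
    let parsed: ComposerJson = serde_json::from_str(json).unwrap();
    assert!(matches!(parsed.scripts["dev"], ScriptValue::Single(_)));
    assert!(matches!(parsed.scripts["multi"], ScriptValue::Multiple(_)));
}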

View File

@ -0,0 +1,3 @@
name: composer
task_engine: composer

View File

@ -0,0 +1,5 @@
{
"scripts": {
"dev": "echo \"Hello from npm\""
}
}

View File

@ -0,0 +1,3 @@
name: npm
task_engine: npm

View File

@ -0,0 +1,5 @@
{
"scripts": {
"dev": "echo \"Hello from pnpm\""
}
}

View File

@ -0,0 +1,3 @@
name: pnpm
task_engine: pnpm

View File

@ -0,0 +1,5 @@
{
"scripts": {
"dev": "echo \"Hello from yarn\""
}
}

View File

@ -0,0 +1,3 @@
name: yarn
task_engine: yarn

View File

@ -4,6 +4,7 @@ use std::fs::{canonicalize, read_to_string};
use std::path::{Path, PathBuf};
use clap::Args;
use glob::glob;
use globset::{Glob, GlobSetBuilder};
use serde::Deserialize;
#[derive(Args, Debug)]
@ -15,28 +16,27 @@ pub struct Arguments {
}
pub fn run (arguments: &Arguments) -> Result<(), String> {
let Arguments { entry, task_name: _task_name } = arguments;
let Arguments { entry, task_name } = arguments;
// Resolve the entry path
let entry_config_path: PathBuf = resolve_config_path(entry)?;
// Discover all config paths
let config_paths: Vec<PathBuf> = discover_config_paths(entry_config_path)?;
let config_paths: Vec<PathBuf> = discover_config_paths(&entry_config_path)?;
// Parse config file content
let config_files: Vec<ConfigFile> = read_config_files(config_paths)?;
// Parse config files
let configs: Vec<Config> = parse_config_files(config_files)?;
println!("configs: {:?}", configs);
// Resolve dependencies based on the directory structure
// (In the future this will be configurable based on a dependency config field)
let config_structure: ConfigStructure = resolve_config_structure(configs)?;
println!("config_structure: {:?}", config_structure);
let config_structure: ConfigStructure = resolve_config_structure(&entry_config_path, configs)?;
// Gather the tasks from the config
// let task_structure: TaskStructure = resolve_task_structure(config_structure, task_name);
let task_structure: TaskStructure = resolve_task_structure(config_structure, task_name);
eprintln!("task_structure = {:?}", task_structure);
// Run the commands, one by one
// > In the future this is configurable on the rask level and maybe on the config file level
@ -47,22 +47,65 @@ pub fn run (arguments: &Arguments) -> Result<(), String> {
}
#[derive(Debug, Clone)]
struct ConfigStructure {
config: Config,
child: HashMap<String, ConfigStructure>
struct TaskStructure {
}
fn resolve_config_structure(_configs: Vec<Config>) -> Result<ConfigStructure, String> {
let _path_map: HashMap<PathBuf, Config> = HashMap::new();
fn resolve_task_structure(config_structure: ConfigStructure, task_name: &String) -> TaskStructure {
todo!()
}
#[derive(Debug, Clone)]
struct ConfigStructure {
config: Config,
children: Vec<ConfigStructure>
}
fn resolve_config_structure(entry_config_path: &PathBuf, configs: Vec<Config>) -> Result<ConfigStructure, String> {
let mut path_map: HashMap<PathBuf, Config> = HashMap::new();
for config in configs {
path_map.insert(config.clone().path, config);
}
let config_structure: ConfigStructure = construct_config_structure(entry_config_path, &path_map)?;
Ok(config_structure)
}
fn construct_config_structure(config_path: &PathBuf, config_path_map: &HashMap<PathBuf, Config>) -> Result<ConfigStructure, String> {
let config = config_path_map.get(config_path).ok_or("Unknown config path")?;
let paths: Vec<PathBuf> = config_path_map.keys().cloned().collect();
let Config { directories, .. } = config;
let config_directory: &Path = config_path.parent().unwrap();
let mut child_paths: Vec<PathBuf> = vec![];
for directory in directories {
let path_pattern: PathBuf = get_config_glob_pattern(config_directory, &directory);
// TODO: Maybe abstract?
let pattern = match Glob::new(path_pattern.to_str().unwrap()) {
Ok(pattern) => pattern,
Err(err) => return Err(format!("Failed to create glob pattern: {:?}", err)),
};
let mut builder = GlobSetBuilder::new();
builder.add(pattern);
let glob_set = builder.build().unwrap();
for path in &paths {
if glob_set.is_match(path) {
child_paths.push(path.to_path_buf());
}
}
}
// TODO: Create a recursive method that creates the config structure based on the directories.
let config_structure = ConfigStructure {
config: Config{
name: "".to_string(),
tasks: Default::default(),
path: Default::default(),
},
child: HashMap::new()
config: config.clone(),
children: child_paths
.iter()
.map(|path| construct_config_structure(path, config_path_map).unwrap())
.collect()
};
Ok(config_structure)
@ -81,13 +124,31 @@ struct ConfigTask {
}
type ConfigTasks = HashMap<String, ConfigTask>;
type ConfigDirectories = Vec<String>;
#[derive(Debug, Clone, Default, Deserialize)]
enum TaskEngine {
#[serde(rename = "composer")]
COMPOSER,
#[serde(rename = "npm")]
NPM,
#[serde(rename = "yarn")]
YARN,
#[serde(rename = "pnpm")]
PNPM,
#[serde(rename = "none")]
#[default]
NONE
}
#[derive(Debug, Clone)]
#[allow(dead_code)]
struct Config {
name: String,
task_engine: TaskEngine,
tasks: HashMap<String, ConfigTask>,
path: PathBuf,
directories: ConfigDirectories,
}
fn parse_config_files(config_files: Vec<ConfigFile>) -> Result<Vec<Config>, String> {
@ -102,15 +163,76 @@ fn parse_config_files(config_files: Vec<ConfigFile>) -> Result<Vec<Config>, Stri
}
fn parse_config_file(config_file: ConfigFile) -> Result<Config, String> {
let ConfigFile { name, tasks: config_file_tasks, _file_path: path, .. } = config_file;
let ConfigFile { name, tasks: config_file_tasks, _file_path: file_path, _dir_path: dir_path, directories, task_engine } = config_file;
let tasks = parse_config_tasks(config_file_tasks)?;
let tasks: ConfigTasks = match task_engine {
TaskEngine::COMPOSER => parse_composer_tasks(dir_path)?,
TaskEngine::NPM => parse_node_tasks(dir_path, "npm")?,
TaskEngine::YARN => parse_node_tasks(dir_path, "yarn")?,
TaskEngine::PNPM => parse_node_tasks(dir_path, "pnpm")?,
TaskEngine::NONE => parse_config_tasks(config_file_tasks)?,
};
let config: Config = Config {name, tasks, path};
let config: Config = Config { name, tasks, path: file_path, directories, task_engine };
Ok(config)
}
#[derive(Debug, Clone, Deserialize, Default)]
struct PackageJsonFile {
#[serde(default)]
scripts: HashMap<String, String>,
}
fn parse_node_tasks(dir_path: PathBuf, prefix: &str) -> Result<ConfigTasks, String> {
let mut file_path = dir_path.clone();
file_path.push("package.json");
let content = read_file_content(file_path)?;
let package_json: PackageJsonFile = serde_json::from_str(&content).expect(format!("Failed to parse package.json from \"{:?}\"", dir_path).as_str());
let mut config_tasks: ConfigTasks = HashMap::new();
for key in package_json.scripts.keys() {
config_tasks.insert(key.clone(), ConfigTask {
task_type: TaskType::SHELL,
content: format!("{:?} run {:?}", prefix, key)
});
}
Ok(config_tasks)
}
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged)]
enum ComposerJsonScriptValue {
Single(String),
Multiple(Vec<String>),
}
#[derive(Debug, Clone, Deserialize, Default)]
struct ComposerJsonFile {
#[serde(default)]
scripts: HashMap<String, ComposerJsonScriptValue>,
}
fn parse_composer_tasks(dir_path: PathBuf) -> Result<ConfigTasks, String> {
let mut file_path = dir_path.clone();
file_path.push("composer.json");
let content = read_file_content(file_path)?;
let composer_json: ComposerJsonFile = serde_json::from_str(&content).expect(format!("Failed to parse composer.json from \"{:?}\"", dir_path).as_str());
let mut config_tasks: ConfigTasks = HashMap::new();
for key in composer_json.scripts.keys() {
config_tasks.insert(key.clone(), ConfigTask {
task_type: TaskType::SHELL,
content: format!("composer run {}", key)
});
}
Ok(config_tasks)
}
fn parse_config_tasks(tasks: ConfigFileTasks) -> Result<ConfigTasks, String> {
let mut config_tasks: ConfigTasks = HashMap::new();
@ -137,7 +259,7 @@ fn read_config_files(paths: Vec<PathBuf>) -> Result<Vec<ConfigFile>, String> {
Ok(configs_files)
}
fn discover_config_paths(path: PathBuf) -> Result<Vec<PathBuf>, String> {
fn discover_config_paths(path: &PathBuf) -> Result<Vec<PathBuf>, String> {
let mut found_config_paths: Vec<PathBuf> = vec![path.clone()];
// Read config
@ -148,11 +270,7 @@ fn discover_config_paths(path: PathBuf) -> Result<Vec<PathBuf>, String> {
// Extract directories
let config_directory = _file_path.parent().ok_or("Failed to get parent directory")?;
for directory in directories {
let mut pattern: PathBuf = config_directory.to_path_buf();
pattern.push(&directory);
if !pattern.ends_with(".yaml") {
pattern.push("rask.yaml");
}
let pattern = get_config_glob_pattern(config_directory, &directory);
// Find config files based on the pattern in the directories value
let pattern_string: &str = pattern.to_str().unwrap();
@ -171,6 +289,17 @@ fn discover_config_paths(path: PathBuf) -> Result<Vec<PathBuf>, String> {
Ok(found_config_paths)
}
fn get_config_glob_pattern(root_path: &Path, glob_pattern: &String) -> PathBuf {
let mut pattern: PathBuf = root_path.to_path_buf();
pattern.push(glob_pattern);
if !pattern.ends_with(".yaml") {
pattern.push("rask.yaml");
}
pattern
}
fn read_file_content (path: PathBuf) -> Result<String, String> {
match read_to_string(path) {
Ok(content) => Ok(content),
@ -184,11 +313,15 @@ type ConfigFileTasks = HashMap<String, String>;
struct ConfigFile {
name: String,
#[serde(default)]
directories: Vec<String>,
task_engine: TaskEngine,
#[serde(default)]
directories: ConfigDirectories,
#[serde(default)]
tasks: ConfigFileTasks,
#[serde(default)]
_file_path: PathBuf,
#[serde(default)]
_dir_path: PathBuf,
}
fn read_config_file(path: PathBuf) -> Result<ConfigFile, String> {
@ -197,6 +330,7 @@ fn read_config_file(path: PathBuf) -> Result<ConfigFile, String> {
let mut config_file: ConfigFile = serde_yaml::from_str(&content).expect(format!("Failed to parse YAML from \"{:?}\"", path).as_str());
config_file._file_path = path.clone();
config_file._dir_path = path.parent().unwrap().to_path_buf();
Ok(config_file)
}
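
One behavioural note on the ConfigFile change at the end of this diff: because TaskEngine derives Default with NONE as the default variant and the field is marked #[serde(default)], existing rask.yaml files without a task_engine key keep parsing exactly as before. A reduced sketch of that behaviour (only two variants shown, PartialEq added for the assertions):

use serde::Deserialize;

// Reduced versions of the TaskEngine/ConfigFile types from this commit.
#[derive(Debug, Default, Deserialize, PartialEq)]
enum TaskEngine {
    #[serde(rename = "npm")]
    NPM,
    #[serde(rename = "none")]
    #[default]
    NONE,
}

#[derive(Debug, Deserialize)]
struct ConfigFile {
    name: String,
    #[serde(default)]
    task_engine: TaskEngine,
}

fn main() {
    // Old-style config without a task_engine key falls back to NONE.
    let old: ConfigFile = serde_yaml::from_str("name: hello").unwrap();
    assert_eq!(old.task_engine, TaskEngine::NONE);

    // New-style config selects an engine explicitly.
    let npm: ConfigFile = serde_yaml::from_str("name: npm\ntask_engine: npm").unwrap();
    assert_eq!(npm.task_engine, TaskEngine::NPM);
}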