Commit: glob removed

BiswajitThakur committed Aug 11, 2024
1 parent ca31ca4 commit f151641
Showing 2 changed files with 8 additions and 16 deletions.
Cargo.toml (3 changes: 1 addition & 2 deletions)

@@ -15,8 +15,7 @@ edition = "2021"
 html = ["regex"]
 
 [dependencies]
-clap = { version = "4.5.13", features = ["cargo", "derive"] }
-glob = "0.3.1"
+clap = { version = "4.5.13", features = ["cargo"] }
 regex = { version = "1.5.5", optional = true }
 
 [lib]
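
For reference, the removed dependency was doing pattern expansion inside the program. A minimal sketch of that behavior, assuming glob 0.3 as a dependency; the pattern string is illustrative:

use glob::glob;

fn main() {
    // Expands the pattern in-process; after this commit, patterns
    // like *.css are only expanded if the shell does it first.
    for entry in glob("assets/*.css").expect("Failed to read glob pattern") {
        match entry {
            Ok(path) => println!("{}", path.display()),
            Err(e) => eprintln!("{:?}", e),
        }
    }
}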
src/cli.rs (21 changes: 7 additions & 14 deletions)

@@ -5,8 +5,7 @@ use std::io::{self, Read, Write};
 use std::path::{Path, PathBuf};
 
 use clap::builder::PossibleValue;
-use clap::{arg, command, value_parser, Arg, ArgAction, ValueEnum};
-use glob::glob;
+use clap::{command, value_parser, Arg, ArgAction, ValueEnum};
 
 extern crate minifier;
 use minifier::{css, js, json};

@@ -38,27 +37,21 @@ type will detect via extension of input file.
                     .value_parser(value_parser!(PathBuf)),
             )
             .arg(
-                arg!(<FILE>)
+                Arg::new("FILE")
                     .help("Input Files...")
                     .num_args(1..)
-                    .value_parser(value_parser!(String))
+                    .value_parser(value_parser!(PathBuf))
                     .action(ArgAction::Append),
             )
             .get_matches();
-        let args: Vec<&str> = matches
-            .get_many::<String>("FILE")
+        let args: Vec<&PathBuf> = matches
+            .get_many::<PathBuf>("FILE")
             .unwrap_or_default()
-            .map(|v| v.as_str())
             .collect::<Vec<_>>();
         let ext: Option<&FileType> = matches.get_one::<FileType>("FileType");
         let out: Option<&PathBuf> = matches.get_one::<PathBuf>("output");
-        for i in args {
-            for entry in glob(i).expect("Failed to read glob pattern") {
-                match entry {
-                    Ok(path) => write_out_file(&path, out, ext),
-                    Err(e) => println!("{:?}", e),
-                }
-            }
+        for path in args.into_iter() {
+            write_out_file(path, out, ext);
         }
     }
 }
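
Extracted from the diff, the new argument handling reads roughly as below as a standalone program. A minimal sketch, assuming clap 4 with the cargo feature (which the Cargo.toml change keeps); the main wrapper and the println! body stand in for the real write_out_file logic:

use std::path::PathBuf;

use clap::{command, value_parser, Arg, ArgAction};

fn main() {
    let matches = command!()
        .arg(
            Arg::new("FILE")
                .help("Input Files...")
                .num_args(1..)
                .value_parser(value_parser!(PathBuf))
                .action(ArgAction::Append),
        )
        .get_matches();
    // FILE values are parsed straight into PathBuf, so no
    // String-to-path conversion step is needed anymore.
    let args: Vec<&PathBuf> = matches
        .get_many::<PathBuf>("FILE")
        .unwrap_or_default()
        .collect();
    for path in args {
        // The real program calls write_out_file(path, out, ext) here.
        println!("{}", path.display());
    }
}

Each FILE argument is now treated as a literal path; unlike the old glob-based loop, nothing is expanded in-process.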

0 comments on commit f151641