diff --git a/src/parser/peg.rs b/src/parser/peg.rs
index d957e804ba462e743d0f04905a25641253cbf9f4..7968ccecac0bbd095a5991b0e1aacf418d15f513 100644
--- a/src/parser/peg.rs
+++ b/src/parser/peg.rs
@@ -1,10 +1,10 @@
-use flow_control::Statement;
-
-use std::process::Command;
+use std::io::{stderr, Write};
+use flow_control::Statement;
 use self::grammar::parse_;
-
-use glob::glob;
+use directory_stack::DirectoryStack;
+use shell::Job;
+use variables::Variables;
 
 #[derive(Debug, PartialEq, Clone, Copy)]
 pub enum RedirectFrom { Stdout, Stderr, Both}
 
@@ -33,62 +33,11 @@ impl Pipeline {
         }
     }
 
-    pub fn expand_globs(&mut self) {
-        let jobs = self.jobs.drain(..).map(|mut job| {
+    pub fn expand(&mut self, variables: &Variables, dir_stack: &DirectoryStack) {
+        for job in &mut self.jobs {
+            job.expand(variables, dir_stack);
             job.expand_globs();
-            job
-        }).collect();
-        self.jobs = jobs;
-    }
-}
-
-#[derive(Debug, PartialEq, Clone, Copy)]
-pub enum JobKind { And, Background, Last, Or, Pipe }
-
-#[derive(Debug, PartialEq, Clone)]
-pub struct Job {
-    pub command: String,
-    pub args: Vec<String>,
-    pub kind: JobKind,
-}
-
-impl Job {
-    pub fn new(args: Vec<String>, kind: JobKind) -> Self {
-        let command = args[0].clone();
-        Job {
-            command: command,
-            args: args,
-            kind: kind,
-        }
-    }
-
-    pub fn expand_globs(&mut self) {
-        let mut new_args: Vec<String> = vec![];
-        for arg in self.args.drain(..) {
-            let mut pushed_glob = false;
-            if arg.contains(|chr| chr == '?' || chr == '*' || chr == '[') {
-                if let Ok(expanded) = glob(&arg) {
-                    for path in expanded.filter_map(Result::ok) {
-                        pushed_glob = true;
-                        new_args.push(path.to_string_lossy().into_owned());
-                    }
-                }
-            }
-            if !pushed_glob {
-                new_args.push(arg);
-            }
-        }
-        self.args = new_args;
-    }
-
-    pub fn build_command(&self) -> Command {
-        let mut command = Command::new(&self.command);
-        for i in 1..self.args.len() {
-            if let Some(arg) = self.args.get(i) {
-                command.arg(arg);
-            }
         }
-        command
     }
 }
 
@@ -96,7 +45,8 @@ pub fn parse(code: &str) -> Statement {
     match parse_(code) {
         Ok(code_ok) => code_ok,
         Err(err) => {
-            println!("ion: Syntax {}",err);
+            let stderr = stderr();
+            let _ = writeln!(stderr.lock(), "ion: Syntax {}", err);
             Statement::Pipelines(vec![])
         }
     }
@@ -109,6 +59,7 @@ mod tests {
     use super::grammar::*;
     use super::*;
     use flow_control::Statement;
+    use shell::JobKind;
 
     #[test]
     fn full_script() {
diff --git a/src/parser/pipelines.rs b/src/parser/pipelines.rs
index fa5777b469dd949b6d50b848324595129c3bc6da..b324f1d27b0398c418ff0235a158c7d1516a71e3 100644
--- a/src/parser/pipelines.rs
+++ b/src/parser/pipelines.rs
@@ -4,7 +4,8 @@
 // - Implement Stderr Piping
 // - Fix the cyclomatic complexity issue
 
-use parser::peg::{Job, JobKind, Pipeline, Redirection, RedirectFrom};
+use parser::peg::{Pipeline, Redirection, RedirectFrom};
+use shell::{Job, JobKind};
 
 const BACKSLASH: u8 = 1;
 const SINGLE_QUOTE: u8 = 2;
@@ -291,7 +292,8 @@ pub fn collect(pipelines: &mut Vec<Pipeline>, possible_error: &mut Option<&str>,
 #[cfg(test)]
 mod tests {
     use flow_control::Statement;
-    use parser::peg::{parse, JobKind, RedirectFrom, Redirection};
+    use parser::peg::{parse, RedirectFrom, Redirection};
+    use shell::JobKind;
 
     #[test]
     fn stderr_redirection() {
diff --git a/src/pipe.rs b/src/pipe.rs
index b3ce9dce9533ea4f1f7145f505dcf1579bd68dd2..cb5d5f5e71f631e76b22bf49a6c65fd54fa02017 100644
--- a/src/pipe.rs
+++ b/src/pipe.rs
@@ -4,26 +4,29 @@ use std::os::unix::io::{FromRawFd, AsRawFd, IntoRawFd};
 use std::fs::{File, OpenOptions};
 use std::thread;
 
+use shell::JobKind;
 use status::*;
-use parser::peg::{Pipeline, JobKind, RedirectFrom};
+use parser::peg::{Pipeline, RedirectFrom};
 
-pub fn execute_pipeline(pipeline: Pipeline) -> i32 {
+pub fn execute_pipeline(pipeline: &mut Pipeline) -> i32 {
     // Generate a list of commands from the given pipeline
     let mut piped_commands: Vec<(Command, JobKind)> = pipeline.jobs
         .iter().map(|job| (job.build_command(), job.kind)).collect();
 
-    if let (Some(stdin), Some(command)) = (pipeline.stdin, piped_commands.first_mut()) {
-        match File::open(&stdin.file) {
-            Ok(file) => unsafe { command.0.stdin(Stdio::from_raw_fd(file.into_raw_fd())); },
-            Err(err) => {
-                let stderr = io::stderr();
-                let mut stderr = stderr.lock();
-                let _ = writeln!(stderr, "ion: failed to redirect stdin into {}: {}", stdin.file, err);
+    if let Some(ref stdin) = pipeline.stdin {
+        if let Some(command) = piped_commands.first_mut() {
+            match File::open(&stdin.file) {
+                Ok(file) => unsafe { command.0.stdin(Stdio::from_raw_fd(file.into_raw_fd())); },
+                Err(err) => {
+                    let stderr = io::stderr();
+                    let mut stderr = stderr.lock();
+                    let _ = writeln!(stderr, "ion: failed to redirect stdin into {}: {}", stdin.file, err);
+                }
             }
         }
     }
 
-    if let Some(stdout) = pipeline.stdout {
+    if let Some(ref stdout) = pipeline.stdout {
         if let Some(mut command) = piped_commands.last_mut() {
             let file = if stdout.append {
                 OpenOptions::new().write(true).append(true).open(&stdout.file)
diff --git a/src/shell/flow.rs b/src/shell/flow.rs
index 53f578b684685378e74925b54930d64c37bfdb4b..419a8b80a85efd2638de0ce17cd62c4e2f00a573 100644
--- a/src/shell/flow.rs
+++ b/src/shell/flow.rs
@@ -153,8 +153,8 @@ impl FlowLogic for Shell {
                 });
             },
             Statement::Pipelines(mut pipelines) => {
-                for pipeline in pipelines.drain(..) {
-                    self.run_pipeline(&pipeline, false);
+                for mut pipeline in pipelines.drain(..) {
+                    self.run_pipeline(&mut pipeline, false);
                 }
             },
             Statement::Break => {
@@ -166,8 +166,8 @@
         false
     }
 
-    fn execute_while(&mut self, expression: Pipeline, statements: Vec<Statement>) {
-        while self.run_pipeline(&expression, false) == Some(SUCCESS) {
+    fn execute_while(&mut self, mut expression: Pipeline, statements: Vec<Statement>) {
+        while self.run_pipeline(&mut expression, false) == Some(SUCCESS) {
             // Cloning is needed so the statement can be re-iterated again if needed.
             if self.execute_statements(statements.clone()) {
                 break
@@ -196,14 +196,14 @@
         }
     }
 
-    fn execute_if(&mut self, expression: Pipeline, success: Vec<Statement>,
+    fn execute_if(&mut self, mut expression: Pipeline, success: Vec<Statement>,
         mut else_if: Vec<ElseIf>, failure: Vec<Statement>) -> bool {
-        match self.run_pipeline(&expression, false) {
+        match self.run_pipeline(&mut expression, false) {
             Some(SUCCESS) => self.execute_statements(success),
             _ => {
-                for elseif in else_if.drain(..) {
-                    if self.run_pipeline(&elseif.expression, false) == Some(SUCCESS) {
+                for mut elseif in else_if.drain(..) {
+                    if self.run_pipeline(&mut elseif.expression, false) == Some(SUCCESS) {
                         return self.execute_statements(elseif.success);
                     }
                 }
@@ -307,8 +307,8 @@
             // Simply executes a provide pipeline, immediately.
             Statement::Pipelines(mut pipelines) => {
                 // Immediately execute the command as it has no dependents.
-                for pipeline in pipelines.drain(..) {
-                    let _ = self.run_pipeline(&pipeline, false);
+                for mut pipeline in pipelines.drain(..) {
+                    let _ = self.run_pipeline(&mut pipeline, false);
                 }
             },
             // At this level, else and else if keywords are forbidden.
diff --git a/src/shell/job.rs b/src/shell/job.rs
new file mode 100644
index 0000000000000000000000000000000000000000..5160764c9c08af2c48e309299046c5c3e8d0105b
--- /dev/null
+++ b/src/shell/job.rs
@@ -0,0 +1,100 @@
+use std::io::{self, Write};
+use std::iter;
+use std::process::Command;
+
+use directory_stack::DirectoryStack;
+use glob::glob;
+use parser::expand_string;
+use parser::shell_expand::ExpandErr;
+use variables::Variables;
+
+#[derive(Debug, PartialEq, Clone, Copy)]
+pub enum JobKind { And, Background, Last, Or, Pipe }
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct Job {
+    pub command: String,
+    pub args: Vec<String>,
+    pub kind: JobKind,
+}
+
+impl Job {
+    pub fn new(args: Vec<String>, kind: JobKind) -> Self {
+        let command = args[0].clone();
+        Job {
+            command: command,
+            args: args,
+            kind: kind,
+        }
+    }
+
+    pub fn expand_globs(&mut self) {
+        let mut new_args: Vec<String> = vec![];
+        for arg in self.args.drain(..) {
+            let mut pushed_glob = false;
+            if arg.contains(|chr| chr == '?' || chr == '*' || chr == '[') {
+                if let Ok(expanded) = glob(&arg) {
+                    for path in expanded.filter_map(Result::ok) {
+                        pushed_glob = true;
+                        new_args.push(path.to_string_lossy().into_owned());
+                    }
+                }
+            }
+            if !pushed_glob {
+                new_args.push(arg);
+            }
+        }
+        self.args = new_args;
+    }
+
+    /// Takes the current job's arguments and expands them in place, one
+    /// argument at a time, replacing them with the expanded arguments.
+    pub fn expand(&mut self, variables: &Variables, dir_stack: &DirectoryStack) {
+        // Expand each of the current job's arguments using the `expand_string` method.
+        // If an error occurs, mark that error and break;
+        let mut expanded: Vec<String> = Vec::new();
+        let mut nth_argument = 0;
+        let mut error_occurred = None;
+        for (job, result) in self.args.iter().map(|argument| expand_string(argument, variables, dir_stack)).enumerate() {
+            match result {
+                Ok(expanded_string) => expanded.push(expanded_string),
+                Err(cause) => {
+                    nth_argument = job;
+                    error_occurred = Some(cause);
+                    expanded = vec!["".to_owned()];
+                    break
+                }
+            }
+        }
+
+        // If an error was detected, handle that error.
+        if let Some(cause) = error_occurred {
+            match cause {
+                ExpandErr::UnmatchedBraces(position) => {
+                    let original = self.args.join(" ");
+                    let n_chars = self.args.iter().take(nth_argument)
+                        .fold(0, |total, arg| total + 1 + arg.len()) + position;
+                    let stderr = io::stderr();
+                    let _ = writeln!(&mut stderr.lock(), "ion: expand error: unmatched braces\n{}\n{}^",
+                        original, iter::repeat("-").take(n_chars).collect::<String>());
+                },
+                ExpandErr::InnerBracesNotImplemented => {
+                    let stderr = io::stderr();
+                    let _ = writeln!(&mut stderr.lock(), "ion: expand error: inner braces not yet implemented");
+                }
+            }
+        }
+
+        self.args = expanded;
+    }
+
+    pub fn build_command(&self) -> Command {
+        let mut command = Command::new(&self.command);
+        for i in 1..self.args.len() {
+            if let Some(arg) = self.args.get(i) {
+                command.arg(arg);
+            }
+        }
+        command
+    }
+}
diff --git a/src/shell/mod.rs b/src/shell/mod.rs
index 3a845ce489c1339f4cb864709d3f55de91675584..ce9e6ba77d4e7325ce40765461df2dd850a4402d 100644
--- a/src/shell/mod.rs
+++ b/src/shell/mod.rs
@@ -1,7 +1,9 @@
 mod history;
+mod job;
 mod flow;
 
 pub use self::history::ShellHistory;
+pub use self::job::{Job, JobKind};
 pub use self::flow::FlowLogic;
 
 use std::collections::HashMap;
@@ -274,9 +276,8 @@ impl Shell {
     /// Executes a pipeline and returns the final exit status of the pipeline.
     /// To avoid infinite recursion when using aliases, the noalias boolean will be set the true
     /// if an alias branch was executed.
-    fn run_pipeline(&mut self, pipeline: &Pipeline, noalias: bool) -> Option<i32> {
-        let mut pipeline = self.variables.expand_pipeline(pipeline, &self.directory_stack);
-        pipeline.expand_globs();
+    fn run_pipeline(&mut self, pipeline: &mut Pipeline, noalias: bool) -> Option<i32> {
+        pipeline.expand(&self.variables, &self.directory_stack);
 
         let command_start_time = SystemTime::now();
 
@@ -294,8 +295,8 @@
 
                 for statement in StatementSplitter::new(&alias).map(parse) {
                     match statement {
-                        Statement::Pipelines(mut pipelines) => for pipeline in pipelines.drain(..) {
-                            exit_status = self.run_pipeline(&pipeline, true);
+                        Statement::Pipelines(mut pipelines) => for mut pipeline in pipelines.drain(..) {
+                            exit_status = self.run_pipeline(&mut pipeline, true);
                         },
                         _ => {
                             exit_status = Some(FAILURE);
diff --git a/src/variables.rs b/src/variables.rs
index cecccbe0cb0d64d895ad481b0756bb4551b7b17c..6966674cf5bbcb2e18590ec24812040d778c01ab 100644
--- a/src/variables.rs
+++ b/src/variables.rs
@@ -1,16 +1,11 @@
 use std::collections::BTreeMap;
 use std::env;
 use std::fmt;
-use std::io::{self, Write};
-use std::iter;
 use std::path::PathBuf;
 use std::process;
 
 use directory_stack::DirectoryStack;
 use liner::Context;
-use parser::expand_string;
-use parser::peg::{Pipeline, Job};
-use parser::shell_expand::ExpandErr;
 use status::{SUCCESS, FAILURE};
 
 pub struct Variables {
@@ -91,55 +86,6 @@ impl Variables {
         self.variables.keys().cloned().chain(env::vars().map(|(k, _)| k)).collect()
     }
 
-    pub fn expand_pipeline(&self, pipeline: &Pipeline, dir_stack: &DirectoryStack) -> Pipeline {
-        // TODO don't copy everything
-        // TODO ugh, I made it worse
-        Pipeline::new(pipeline.jobs.iter().map(|job| self.expand_job(job, dir_stack)).collect(),
-                      pipeline.stdin.clone(),
-                      pipeline.stdout.clone())
-    }
-
-    /// Takes the current job's arguments and expands them, one argument at a
-    /// time, returning a new `Job` with the expanded arguments.
-    pub fn expand_job(&self, job: &Job, dir_stack: &DirectoryStack) -> Job {
-        // Expand each of the current job's arguments using the `expand_string` method.
-        // If an error occurs, mark that error and break;
-        let mut expanded: Vec<String> = Vec::new();
-        let mut nth_argument = 0;
-        let mut error_occurred = None;
-        for (job, result) in job.args.iter().map(|argument| expand_string(argument, self, dir_stack)).enumerate() {
-            match result {
-                Ok(expanded_string) => expanded.push(expanded_string),
-                Err(cause) => {
-                    nth_argument = job;
-                    error_occurred = Some(cause);
-                    expanded = vec!["".to_owned()];
-                    break
-                }
-            }
-        }
-
-        // If an error was detected, handle that error.
-        if let Some(cause) = error_occurred {
-            match cause {
-                ExpandErr::UnmatchedBraces(position) => {
-                    let original = job.args.join(" ");
-                    let n_chars = job.args.iter().take(nth_argument)
-                        .fold(0, |total, arg| total + 1 + arg.len()) + position;
-                    let stderr = io::stderr();
-                    let _ = writeln!(&mut stderr.lock(), "ion: expand error: unmatched braces\n{}\n{}^",
-                        original, iter::repeat("-").take(n_chars).collect::<String>());
-                },
-                ExpandErr::InnerBracesNotImplemented => {
-                    let stderr = io::stderr();
-                    let _ = writeln!(&mut stderr.lock(), "ion: expand error: inner braces not yet implemented");
-                }
-            }
-        }
-
-        Job::new(expanded, job.kind)
-    }
-
     pub fn is_valid_variable_character(c: char) -> bool {
         c.is_alphanumeric() || c == '_' || c == '?'
     }