#include "lexical_analysis.h" #include int is_double_quotes_pair(struct param_type params) { return !(params.double_quotes_counter % 2); } int escape_double_quotes_or_backslash(int ch, struct param_type params) { return params.escape_sequences && (ch == double_quotes || ch == backslash); } int double_quotes_again(int ch, struct param_type params) { return ch == double_quotes && !params.is_word && params.stored_symbol == '"'; } int check_separation(int ch, struct param_type params) { return (ch == whitespace || ch == tab) && params.is_word && !params.escape_sequences; } int ignore_spaces(int ch, struct param_type params) { return (ch == whitespace || ch == tab) && !params.escape_sequences; } int change_mode(int ch, struct param_type params) { return ch == '"' && !params.escape_sequences; } int start_escape_sequence(int ch, struct param_type params) { return ch == backslash && !params.escape_sequences; } int is_empty_word(int ch, struct param_type params) { return (ch == whitespace || ch == tab) && !params.is_word && params.empty_word_flag; } int command_execution_condition(struct param_type *params) { return (params->tokens != '<' && params->tokens != '>' && params->tokens != append && !params->pipeline) || (params->pipeline && params->tokens == '&'); } int is_first_special_token_character(int ch) { return ch == '<' || ch == '>' || ch == '&' || ch == '|' || ch == ';'; } int excessive_words(int ch, struct param_type *params) { int next_ch; if(filename_waiting(params)) { if(ch == new_line) return 0; while((next_ch = getchar()) != new_line) { if(next_ch == ' ') continue; if(!is_first_special_token_character(next_ch)) { params->wrong_command = err_extra_chars_after_filename; return 1; } else break; } ungetc(next_ch, stdin); } return 0; } void add_word_or_filename(struct w_queue *word_chain, struct dynamic_array *tmp_word, struct param_type *params) { /* filenames */ if(filename_waiting(params) && !params->wrong_command) add_filename(tmp_word, params); /* execute command */ else if(params->is_word) add_word(word_chain, tmp_word, params); } int validate_redirections(int ch, int next_ch, struct param_type *params) { return (ch == '<' && params->streams.input_stream == NULL) || ((ch == '>' || (ch == '>' && next_ch == '>')) && params->streams.output_stream == NULL && params->streams.output_stream_to_append == NULL); } int is_double_token(struct param_type *params) { return params->tokens == and || params->tokens == or || params->tokens == append; } int is_special_token(int ch, int next_ch) { return (ch == '&' && next_ch == '&') || (ch == '|' && ch == '|') || ch == '&' || ch == ';' || ch == '|'; } int is_redirect_token(int ch, int next_ch) { return ch == '<' || ch == '>' || (ch == '>' && next_ch == '>'); } /* * redirection token verification */ int stream_redirect_tokens(struct w_queue *word_chain, struct dynamic_array *tmp_word, int ch, struct param_type *params, struct readline_type *readline) { int next_ch; if(ch == '>') next_ch = readline->arr[readline->considered_index + 1]; if(is_redirect_token(ch, next_ch)) { add_word_or_filename(word_chain, tmp_word, params); if(params->wrong_command) return 0; if(validate_redirections(ch, next_ch, params)) { params->tokens = (ch == '>' && next_ch == '>') ? 
append : ch; if(is_double_token(params)) ++readline->considered_index; return 1; } else params->wrong_command = err_redirect_stream_again; } return 0; } /* * the first element of the pipeline outputs to a file or * redirects to a file in the middle of the pipeline */ int wrong_streams_redirection(struct param_type *params) { return (!params->pipeline && (params->tokens == '>' || params->tokens == append)) || (params->pipeline && (params->tokens == '>' || params->tokens == append || params->tokens == '<')); } int pipeline_token_processing(struct w_queue *word_chain, struct c_queue *cmdlines, struct dynamic_array *tmp_word, struct param_type *params) { char **cmdline = NULL; if(is_stream_redirection_set(params) && wrong_streams_redirection(params)) { params->wrong_command = err_redirect_stream_in_pipeline; return 0; } params->tokens = '|'; params->pipeline = 1; cmdline = create_cmdline(word_chain, w_queue_get_word_count(word_chain)); c_queue_push(cmdlines, cmdline); w_queue_clear(word_chain); dynarr_reset_array(tmp_word); return 1; } /* * verification of special tokens (|, &, &&, ||), except redirection tokens */ int special_tokens(struct w_queue *word_chain, struct c_queue *cmdlines, struct dynamic_array *tmp_word, int ch, struct param_type *params, struct readline_type *readline) { int next_ch, i; if(ch == '|' || ch == '&') next_ch = readline->arr[readline->considered_index + 1]; if(is_special_token(ch, next_ch)) { add_word_or_filename(word_chain, tmp_word, params); if(params->wrong_command) return 0; if(ch == '|' && next_ch == '|') params->tokens = or; else if(ch == '|') { if(!pipeline_token_processing(word_chain, cmdlines, tmp_word, params)) return 0; } else if(ch == '&' && next_ch == '&') params->tokens = and; else if(ch == '&') { for(i = readline->considered_index+1; readline->arr[i] != new_line; ++i) { if(readline->arr[i] != whitespace && readline->arr[i] != tab) { params->wrong_command = err_bg_process; return 0; } } params->tokens = '&'; } if(is_double_token(params)) ++readline->considered_index; return 1; } return 0; }
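
/*
 * A minimal, hypothetical demo of the single-character token helpers above.
 * It is not part of the original shell: the LEXER_TOKEN_DEMO guard and the
 * sample command line are assumptions added purely for illustration.
 * Compile this translation unit with -DLEXER_TOKEN_DEMO (and without the
 * project's own main) to run it.
 */
#ifdef LEXER_TOKEN_DEMO
int main(void)
{
    const char *line = "cat < in.txt >> out.txt && echo done | wc -c ;";
    int i;
    for(i = 0; line[i]; ++i) {
        int ch = line[i];
        int next_ch = line[i + 1];    /* '\0' at the end is a safe lookahead */
        if(is_first_special_token_character(ch))
            printf("pos %d: '%c'  redirect token: %s  control token: %s\n",
                   i, ch,
                   is_redirect_token(ch, next_ch) ? "yes" : "no",
                   is_special_token(ch, next_ch) ? "yes" : "no");
    }
    return 0;
}
#endif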