author    scratko <m@scratko.xyz>  2024-06-20 18:38:11 +0300
committer scratko <m@scratko.xyz>  2024-11-23 20:59:45 +0300
commit    54679d85b1f2c1349bcbbc76b10d57a1e5137f23 (patch)
tree      030bb7951a9a16dcdb8b1f47d1a5ff09dba8276c /lexical_analysis.c
parent    ff38bddd4253b5adf08a84df34bfae32c8ae988d (diff)
download  shell-54679d85b1f2c1349bcbbc76b10d57a1e5137f23.tar.gz
          shell-54679d85b1f2c1349bcbbc76b10d57a1e5137f23.tar.bz2
          shell-54679d85b1f2c1349bcbbc76b10d57a1e5137f23.zip

Shell-edit release (branch: shell-edit)
Autocomplete program and file names (Tab). Move the cursor to edit the command (left and right arrow keys). Delete a character in the command (Backspace).
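These features imply reading the terminal one key press at a time rather than whole lines. The sketch below is not taken from this repository; it only illustrates the usual POSIX approach (termios non-canonical mode), and the dispatch branches are placeholders for the completion, deletion and cursor-movement logic.

#include <stdio.h>
#include <termios.h>
#include <unistd.h>

static struct termios saved;

/* Switch the terminal to non-canonical mode: no line buffering, no echo,
 * deliver each byte as soon as it is typed. */
static void enable_raw_input(void)
{
    struct termios t;
    tcgetattr(STDIN_FILENO, &saved);
    t = saved;
    t.c_lflag &= ~(ICANON | ECHO);
    t.c_cc[VMIN] = 1;
    t.c_cc[VTIME] = 0;
    tcsetattr(STDIN_FILENO, TCSANOW, &t);
}

static void restore_input(void)
{
    tcsetattr(STDIN_FILENO, TCSANOW, &saved);
}

int main(void)
{
    int ch;
    enable_raw_input();
    while((ch = getchar()) != '\n') {
        if(ch == '\t') {
            /* run completion over program and file names */
        } else if(ch == 127 || ch == '\b') {
            /* erase the character to the left of the cursor */
        } else if(ch == 27) {
            if(getchar() == '[')
                ch = getchar();  /* 'C' = right arrow, 'D' = left arrow */
        }
        /* otherwise insert ch into the edit buffer at the cursor */
    }
    restore_input();
    return 0;
}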
Diffstat (limited to 'lexical_analysis.c')
-rw-r--r--  lexical_analysis.c  28
1 file changed, 16 insertions(+), 12 deletions(-)
diff --git a/lexical_analysis.c b/lexical_analysis.c
index 4349a69..5db57cb 100644
--- a/lexical_analysis.c
+++ b/lexical_analysis.c
@@ -123,11 +123,13 @@ int is_redirect_token(int ch, int next_ch)
*/
int stream_redirect_tokens(struct w_queue *word_chain,
struct dynamic_array *tmp_word, int ch,
- struct param_type *params)
+ struct param_type *params,
+ struct readline_type *readline)
{
int next_ch;
- next_ch = getchar();
- ungetc(next_ch, stdin);
+
+ if(ch == '>')
+ next_ch = readline->arr[readline->considered_index + 1];
if(is_redirect_token(ch, next_ch)) {
add_word_or_filename(word_chain, tmp_word, params);
@@ -138,7 +140,7 @@ int stream_redirect_tokens(struct w_queue *word_chain,
if(validate_redirections(ch, next_ch, params)) {
params->tokens = (ch == '>' && next_ch == '>') ? append : ch;
if(is_double_token(params))
- getchar();
+ ++readline->considered_index;
return 1;
} else
params->wrong_command = err_redirect_stream_again;
@@ -178,7 +180,7 @@ int pipeline_token_processing(struct w_queue *word_chain,
c_queue_push(cmdlines, cmdline);
w_queue_clear(word_chain);
- dynarr_drop_word(tmp_word);
+ dynarr_reset_array(tmp_word);
return 1;
}
@@ -187,11 +189,12 @@ int pipeline_token_processing(struct w_queue *word_chain,
*/
int special_tokens(struct w_queue *word_chain, struct c_queue *cmdlines,
struct dynamic_array *tmp_word, int ch,
- struct param_type *params)
+ struct param_type *params, struct readline_type *readline)
{
- int next_ch;
- next_ch = getchar();
- ungetc(next_ch, stdin);
+ int next_ch, i;
+
+ if(ch == '|' || ch == '&')
+ next_ch = readline->arr[readline->considered_index + 1];
if(is_special_token(ch, next_ch)) {
add_word_or_filename(word_chain, tmp_word, params);
@@ -208,8 +211,9 @@ int special_tokens(struct w_queue *word_chain, struct c_queue *cmdlines,
} else if(ch == '&' && next_ch == '&')
params->tokens = and;
else if(ch == '&') {
- while((ch = getchar()) != new_line) {
- if(ch != whitespace && ch != tab) {
+ for(i = readline->considered_index+1; readline->arr[i] != new_line;
+ ++i) {
+ if(readline->arr[i] != whitespace && readline->arr[i] != tab) {
params->wrong_command = err_bg_process;
return 0;
}
@@ -217,7 +221,7 @@ int special_tokens(struct w_queue *word_chain, struct c_queue *cmdlines,
params->tokens = '&';
}
if(is_double_token(params))
- getchar();
+ ++readline->considered_index;
return 1;
}
return 0;
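This commit replaces stream lookahead (a getchar() immediately pushed back with ungetc()) with indexing into a buffer that already holds the whole edited line, which is what arrow-key editing requires. A condensed sketch of that pattern follows; struct readline_type is defined elsewhere in the repository, only the two members used in this diff (arr, considered_index) are reproduced here, and the helper functions are illustrative rather than part of shell-edit.

#include <stdio.h>

struct readline_type {
    int *arr;              /* characters of the already-read command line */
    int considered_index;  /* position of the character being analysed */
};

/* Peek at the character after the current one without consuming it,
 * as readline->arr[readline->considered_index + 1] does in the diff. */
static int peek_next(const struct readline_type *readline)
{
    return readline->arr[readline->considered_index + 1];
}

/* Consume the peeked character, as ++readline->considered_index does
 * in the diff when a double token such as ">>" or "&&" is recognised. */
static void consume_next(struct readline_type *readline)
{
    ++readline->considered_index;
}

int main(void)
{
    int line[] = { '>', '>', ' ', 'f', '\n' };
    struct readline_type readline = { line, 0 };

    if(line[readline.considered_index] == '>' && peek_next(&readline) == '>') {
        consume_next(&readline);  /* ">>" is the append token */
        printf("append redirection\n");
    }
    return 0;
}

Reading the line up front also sidesteps the portability limit of ungetc(), which only guarantees a single character of pushback.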