path: root/lexical_analysis.c
author     scratko <m@scratko.xyz>    2024-06-20 18:38:11 +0300
committer  scratko <m@scratko.xyz>    2024-06-23 23:25:46 +0300
commit     74c6a747a58b38131534556ab95fa4dd4b514780 (patch)
tree       68dae86f9ee994c2bc13681703a8c69e0e2b5f51 /lexical_analysis.c
parent     8f4f87eabec13330a2b3a974975053c1e4632a11 (diff)
download   shell-74c6a747a58b38131534556ab95fa4dd4b514780.tar.gz
           shell-74c6a747a58b38131534556ab95fa4dd4b514780.tar.bz2
           shell-74c6a747a58b38131534556ab95fa4dd4b514780.zip
Shell-edit release (shell-edit)
Autocomplete program and file names (Tab). Move the cursor to edit commands (left and right arrows). Delete a character in a command (Backspace).
Diffstat (limited to 'lexical_analysis.c')
-rw-r--r--  lexical_analysis.c  28
1 file changed, 16 insertions(+), 12 deletions(-)
diff --git a/lexical_analysis.c b/lexical_analysis.c
index 4349a69..5db57cb 100644
--- a/lexical_analysis.c
+++ b/lexical_analysis.c
@@ -123,11 +123,13 @@ int is_redirect_token(int ch, int next_ch)
  */
 int stream_redirect_tokens(struct w_queue *word_chain,
                            struct dynamic_array *tmp_word, int ch,
-                           struct param_type *params)
+                           struct param_type *params,
+                           struct readline_type *readline)
 {
     int next_ch;
-    next_ch = getchar();
-    ungetc(next_ch, stdin);
+
+    if(ch == '>')
+        next_ch = readline->arr[readline->considered_index + 1];
 
     if(is_redirect_token(ch, next_ch)) {
         add_word_or_filename(word_chain, tmp_word, params);
@@ -138,7 +140,7 @@ int stream_redirect_tokens(struct w_queue *word_chain,
         if(validate_redirections(ch, next_ch, params)) {
             params->tokens = (ch == '>' && next_ch == '>') ? append : ch;
             if(is_double_token(params))
-                getchar();
+                ++readline->considered_index;
             return 1;
         } else
             params->wrong_command = err_redirect_stream_again;
@@ -178,7 +180,7 @@ int pipeline_token_processing(struct w_queue *word_chain,
 
         c_queue_push(cmdlines, cmdline);
         w_queue_clear(word_chain);
-        dynarr_drop_word(tmp_word);
+        dynarr_reset_array(tmp_word);
         return 1;
     }
 
@@ -187,11 +189,12 @@
  */
 int special_tokens(struct w_queue *word_chain, struct c_queue *cmdlines,
                    struct dynamic_array *tmp_word, int ch,
-                   struct param_type *params)
+                   struct param_type *params, struct readline_type *readline)
 {
-    int next_ch;
-    next_ch = getchar();
-    ungetc(next_ch, stdin);
+    int next_ch, i;
+
+    if(ch == '|' || ch == '&')
+        next_ch = readline->arr[readline->considered_index + 1];
 
     if(is_special_token(ch, next_ch)) {
         add_word_or_filename(word_chain, tmp_word, params);
@@ -208,8 +211,9 @@ int special_tokens(struct w_queue *word_chain, struct c_queue *cmdlines,
         } else if(ch == '&' && next_ch == '&')
             params->tokens = and;
         else if(ch == '&') {
-            while((ch = getchar()) != new_line) {
-                if(ch != whitespace && ch != tab) {
+            for(i = readline->considered_index+1; readline->arr[i] != new_line;
+                ++i) {
+                if(readline->arr[i] != whitespace && readline->arr[i] != tab) {
                     params->wrong_command = err_bg_process;
                     return 0;
                 }
@@ -217,7 +221,7 @@ int special_tokens(struct w_queue *word_chain, struct c_queue *cmdlines,
             params->tokens = '&';
         }
         if(is_double_token(params))
-            getchar();
+            ++readline->considered_index;
         return 1;
     }
     return 0;
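
The change above replaces stream lookahead (getchar()/ungetc()) with indexing into a line that has already been read into memory, so the lexer can peek at the next character and skip over two-character tokens without touching stdin. Below is a minimal sketch of that idea, assuming a readline_type that stores the edited line in arr and tracks the current position in considered_index (those two field names come from the diff; the struct layout, the peek_next() helper, and the demo main() are illustrative, not the project's actual definitions).

#include <stdio.h>

/* Assumed layout: the real struct in the shell may differ. */
struct readline_type {
    char arr[1024];        /* command line captured by the line editor */
    int considered_index;  /* index of the character being tokenized   */
};

/* Peek at the character after the current one without consuming it,
 * mirroring readline->arr[readline->considered_index + 1] in the diff. */
static int peek_next(const struct readline_type *readline)
{
    return readline->arr[readline->considered_index + 1];
}

int main(void)
{
    struct readline_type readline = { "cat >> log.txt\n", 0 };

    for (; readline.arr[readline.considered_index] != '\n';
           ++readline.considered_index) {
        int ch = readline.arr[readline.considered_index];

        if (ch == '>' && peek_next(&readline) == '>') {
            puts("append redirection (>>)");
            /* A double token spans two characters, so advance the index
             * once more here instead of calling getchar() a second time. */
            ++readline.considered_index;
        } else if (ch == '>') {
            puts("output redirection (>)");
        }
    }
    return 0;
}

Compared with getchar()/ungetc(), lookahead stays bounded by the line already in memory, which suits a line editor that assembles the whole command (arrow keys, Backspace, Tab completion) before tokenization starts.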