1 Commits

Author SHA1 Message Date
1c418dded0 Merge pull request 'save commit' (#1) from solo into feature/executor
Reviewed-on: #1
2026-02-09 20:48:16 +01:00
2 changed files with 45 additions and 131 deletions

View File

@@ -6,7 +6,7 @@
/* By: sede-san <sede-san@student.42madrid.com +#+ +:+ +#+ */ /* By: sede-san <sede-san@student.42madrid.com +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2025/10/22 19:03:51 by sede-san #+# #+# */ /* Created: 2025/10/22 19:03:51 by sede-san #+# #+# */
/* Updated: 2026/02/09 21:19:02 by sede-san ### ########.fr */ /* Updated: 2026/02/09 20:36:19 by sede-san ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */
@@ -17,6 +17,12 @@
# include "core.h" # include "core.h"
# include "builtins.h" # include "builtins.h"
# define PIPE_STR "|"
# define REDIRECT_IN_STR "<"
# define REDIRECT_OUT_STR ">"
# define APPEND_STR ">>"
# define HEREDOC_STR "<<"
# define TOKENS_COUNT 5 # define TOKENS_COUNT 5
typedef enum e_token_type typedef enum e_token_type

View File

@@ -6,7 +6,7 @@
/* By: sede-san <sede-san@student.42madrid.com +#+ +:+ +#+ */ /* By: sede-san <sede-san@student.42madrid.com +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */ /* +#+#+#+#+#+ +#+ */
/* Created: 2026/02/09 18:56:41 by sede-san #+# #+# */ /* Created: 2026/02/09 18:56:41 by sede-san #+# #+# */
/* Updated: 2026/02/09 23:09:28 by sede-san ### ########.fr */ /* Updated: 2026/02/09 20:42:50 by sede-san ### ########.fr */
/* */ /* */
/* ************************************************************************** */ /* ************************************************************************** */
@@ -15,157 +15,65 @@
static t_token *tokenize(const char *line, size_t *start); static t_token *tokenize(const char *line, size_t *start);
static t_token_type get_token_type(const char *str); static t_token_type get_token_type(const char *str);
static t_token *token_new(t_token_type type, const char *text);
static void token_clear(t_token *token);
static t_token *read_token(t_token_type type, const char *line, size_t *i);
static t_token *read_word(const char *line, size_t *i);
static inline bool is_meta(char c);
/** t_list *lex(
* @brief Converts a command line string into a list of tokens. const char *line
* ) {
* @return A list of tokens or NULL on error. t_list *tokens;
*/
t_list *lex(
const char *line)
{
t_list *tokens;
t_token *token; t_token *token;
size_t i; size_t i;
tokens = NULL; tokens = NULL;
i = 0; i = 0;
while (line[i] != '\0') while (line[i] != '\0')
{ {
// ignore spaces
while (ft_isspace(line[i])) while (ft_isspace(line[i]))
i++; i++;
if (line[i] == '\0') // create token
break;
token = tokenize(line, &i); token = tokenize(line, &i);
ft_lstadd_back(&tokens, ft_lstnew(token)); // add token to list
if (token == NULL) if (token != NULL)
{ ft_lstadd_back(&tokens, ft_lstnew(token));
ft_lstclear(&tokens, (void (*)(void *))token_clear);
return (NULL);
}
} }
return (tokens); return (tokens);
} }
/** static t_token *tokenize(const char *line, size_t *start) {
* @return A new token or NULL on error. t_token *token;
*/ t_token_type type;
static t_token *tokenize(
const char *line,
size_t *start)
{
t_token *token;
t_token_type type;
token = NULL;
if (line == NULL || line[*start] == '\0') if (line == NULL || line[*start] == '\0')
return (NULL); return (NULL);
type = get_token_type(line + *start); type = get_token_type(line + *start);
if (type != TOKEN_WORD) (void)type;
token = read_token(type, line, start); // if (type != TOKEN_WORD)
else // token = token_new(type, NULL);
token = read_word(line, start); // else
// token = read_word(line, start);
// if (token == NULL)
// (*start) += ft_strlen(token->value);
return (token); return (token);
} }
static t_token_type get_token_type( static t_token_type get_token_type(const char *str)
const char *str
)
{ {
if (str == NULL || str[0] == '\0') size_t i;
return (TOKEN_WORD); static const t_map_entry tokens[TOKENS_COUNT] = {
if (str[0] == '|') {PIPE_STR, (void *)TOKEN_PIPE},
return (TOKEN_PIPE); {REDIRECT_IN_STR, (void *)TOKEN_REDIRECT_IN},
if (str[0] == '<') {REDIRECT_OUT_STR, (void *)TOKEN_REDIRECT_OUT},
{APPEND_STR, (void *)TOKEN_APPEND},
{HEREDOC_STR, (void *)TOKEN_HEREDOC}
};
i = 0;
while (i < TOKENS_COUNT)
{ {
if (str[1] == '<') if (ft_strcmp(str, tokens[i].key) == 0)
return (TOKEN_HEREDOC); return ((t_token_type)tokens[i].value);
return (TOKEN_REDIRECT_IN); i++;
}
if (str[0] == '>')
{
if (str[1] == '>')
return (TOKEN_APPEND);
return (TOKEN_REDIRECT_OUT);
} }
return (TOKEN_WORD); return (TOKEN_WORD);
} }
/**
 * @brief Allocates a new token and takes ownership of @p text.
 *
 * Ownership: on success the token owns @p text (freed later by
 * token_clear); on failure @p text is freed here so the caller never
 * has to clean up — TODO confirm all callers pass heap or NULL.
 *
 * @param type The token's type.
 * @param text The token's value; must be non-NULL for TOKEN_WORD,
 *             NULL for operator tokens.
 * @return A new token, or NULL on invalid input or allocation failure.
 */
static t_token	*token_new(
	t_token_type type,
	const char *text)
{
	t_token	*token;

	/* Reject an invalid word up front so we never malloc for nothing. */
	if (type == TOKEN_WORD && text == NULL)
		return (NULL);
	token = (t_token *)malloc(sizeof(t_token));
	if (token == NULL)
	{
		/* Don't leak the caller's allocation on OOM (free(NULL) is ok). */
		free((void *)text);
		return (NULL);
	}
	token->type = type;
	token->value = text;
	return (token);
}
/**
 * @brief Frees a token and the value string it owns.
 *
 * Safe to call with NULL. Used as the del callback for ft_lstclear.
 *
 * @param token The token to free (may be NULL).
 */
static void	token_clear(
	t_token *token)
{
	if (token == NULL)
		return ;
	/* free(NULL) is a no-op, so no guard on value is needed. */
	free(token->value);
	free(token);
}
/**
 * @brief Consumes one operator token ("|", "<", ">", "<<", ">>") at *i.
 *
 * Advances *i past exactly the characters of this operator: one
 * character, or two when a '<' or '>' is doubled (heredoc/append).
 * The previous version skipped every consecutive space and meta
 * character, merging distinct operators such as ">>|" into one token.
 * Surrounding whitespace is the caller's (lex's) responsibility.
 *
 * @param type The operator's token type (already classified).
 * @param line The command line being lexed.
 * @param i    In/out cursor into @p line.
 * @return A new value-less token, or NULL on allocation failure.
 */
static t_token	*read_token(
	t_token_type type,
	const char *line,
	size_t *i)
{
	char	c;

	c = line[*i];
	(*i)++;
	if ((c == '<' || c == '>') && line[*i] == c)
		(*i)++;
	return (token_new(type, NULL));
}
/**
 * @brief Consumes one word token at *i, honoring shell quoting.
 *
 * Quotes toggle an in-quote state instead of ending the word, so
 * spaces and meta characters inside '...' or "..." are kept. Outside
 * quotes, the word ends at whitespace or a meta character ('|', '<',
 * '>'). The quote characters themselves remain part of the value.
 *
 * NOTE(review): this uses libc isspace while lex uses ft_isspace —
 * presumably equivalent, but verify they agree on the same charset.
 *
 * @param line The command line being lexed.
 * @param i    In/out cursor into @p line.
 * @return A TOKEN_WORD token owning a substring of @p line, or NULL
 *         on allocation failure.
 */
static t_token	*read_word(
	const char *line,
	size_t *i)
{
	const size_t	start = *i;
	bool			in_single_quote;
	bool			in_double_quote;

	in_single_quote = false;
	in_double_quote = false;
	while (line[*i] != '\0')
	{
		if (line[*i] == '\'' && !in_double_quote)
			in_single_quote = !in_single_quote;
		else if (line[*i] == '"' && !in_single_quote)
			in_double_quote = !in_double_quote;
		/* Cast: isspace on a negative plain char is undefined behavior. */
		else if (!in_single_quote && !in_double_quote
			&& (isspace((unsigned char)line[*i]) || is_meta(line[*i])))
			break ;
		(*i)++;
	}
	return (token_new(TOKEN_WORD, ft_substr(line, start, *i - start)));
}
/**
 * @brief Tells whether @p c is a shell metacharacter ('|', '<', '>').
 */
static inline bool	is_meta(char c)
{
	switch (c)
	{
		case '|':
		case '<':
		case '>':
			return (true);
		default:
			return (false);
	}
}