Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Added support for java & python #66

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@ if [ `uname` = "Darwin" ]; then
CFLAGS+=" -framework OpenGL"
fi

$CC $CFLAGS `pkg-config --cflags $PKGS` -o ded $SRC $LIBS `pkg-config --libs $PKGS`
$CC $CFLAGS `pkg-config --cflags $PKGS` -g -o ded $SRC $LIBS `pkg-config --libs $PKGS`
2 changes: 1 addition & 1 deletion src/editor.c
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ void editor_retokenize(Editor *e)
// Syntax Highlighting
{
e->tokens.count = 0;
Lexer l = lexer_new(e->atlas, e->data.items, e->data.count);
Lexer l = lexer_new(e->atlas, e->data.items, e->data.count, e->file_path);
Token t = lexer_next(&l);
while (t.kind != TOKEN_END) {
da_append(&e->tokens, t);
Expand Down
61 changes: 52 additions & 9 deletions src/lexer.c
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
#include <stdbool.h>
#include <ctype.h>
#include <string.h>
#include <stdlib.h>
#include "common.h"
#include "lexer.h"

Expand All @@ -19,7 +20,12 @@ Literal_Token literal_tokens[] = {
};
#define literal_tokens_count (sizeof(literal_tokens)/sizeof(literal_tokens[0]))

const char *keywords[] = {
// Java keywords recognized for syntax highlighting of .java files.
// Includes reserved words, reserved literals (true/false/null), and the
// Java 9+ contextual/module keywords. Fix: the original list contained
// duplicate "const" and "goto" entries, which doubled the (linear) scan
// cost for those words and bloated the table.
// NOTE(review): "non-sealed" contains '-', which the symbol tokenizer
// presumably never emits as one token — confirm against lexer_next.
const char *jKeywords[] = {
    "abstract", "assert", "boolean", "break", "byte", "case", "catch",
    "char", "class", "const", "continue", "default", "do", "double",
    "else", "enum", "extends", "final", "finally", "float", "for",
    "goto", "if", "implements", "import", "instanceof", "int",
    "interface", "long", "native", "new", "package", "private",
    "protected", "public", "return", "short", "static", "strictfp",
    "super", "switch", "synchronized", "this", "throw", "throws",
    "transient", "try", "void", "volatile", "while",
    // Contextual (module/sealed-class) keywords, Java 9+:
    "non-sealed", "open", "opens", "permits", "provides", "record",
    "sealed", "to", "transitive", "uses", "var", "with", "yield",
    // Reserved literals:
    "true", "false", "null",
};
#define jKeywords_count (sizeof(jKeywords)/sizeof(jKeywords[0]))

const char *cKeywords[] = {
"auto", "break", "case", "char", "const", "continue", "default", "do", "double",
"else", "enum", "extern", "float", "for", "goto", "if", "int", "long", "register",
"return", "short", "signed", "sizeof", "static", "struct", "switch", "typedef",
Expand All @@ -34,7 +40,12 @@ const char *keywords[] = {
"template", "this", "thread_local", "throw", "true", "try", "typeid", "typename",
"using", "virtual", "wchar_t", "xor", "xor_eq",
};
#define keywords_count (sizeof(keywords)/sizeof(keywords[0]))
#define cKeywords_count (sizeof(cKeywords)/sizeof(cKeywords[0]))

// Python 3 keywords (per the `keyword` module's keyword list) recognized
// for syntax highlighting of .py files.
const char *pyKeywords[] = {
    "False", "None", "True",
    "and", "as", "assert", "async", "await",
    "break", "class", "continue", "def", "del",
    "elif", "else", "except", "finally", "for", "from",
    "global", "if", "import", "in", "is", "lambda",
    "nonlocal", "not", "or", "pass", "raise", "return",
    "try", "while", "with", "yield",
};
#define pyKeywords_count (sizeof(pyKeywords)/sizeof(pyKeywords[0]))

const char *token_kind_name(Token_Kind kind)
{
Expand Down Expand Up @@ -65,12 +76,16 @@ const char *token_kind_name(Token_Kind kind)
return NULL;
}

Lexer lexer_new(Free_Glyph_Atlas *atlas, const char *content, size_t content_len)
// Create a lexer over `content` (length `content_len`, not owned or copied).
// `file_path` is duplicated so the lexer can later pick a keyword table by
// file extension; only its `items` string is copied.
// Fixes vs. original: the allocation used sizeof(char*) instead of
// sizeof(char), over-allocating 8x; the malloc result was not checked
// before strcpy (NULL deref on OOM); the malloc cast was unnecessary in C.
// NOTE(review): assumes file_path.items is NUL-terminated — confirm the
// String_Builder contract. The copy is never freed here; ownership/lifetime
// of Lexer instances should be confirmed at call sites (editor_retokenize).
Lexer lexer_new(Free_Glyph_Atlas *atlas, const char *content, size_t content_len, String_Builder file_path)
{
    Lexer l = {0};
    l.atlas = atlas;
    l.content = content;
    l.content_len = content_len;
    if (file_path.items != NULL) {
        size_t len = strlen(file_path.items);
        l.file_path.items = malloc(len + 1);
        if (l.file_path.items != NULL) {
            memcpy(l.file_path.items, file_path.items, len + 1);
        }
        // On allocation failure file_path.items stays NULL, which the
        // keyword-dispatch path already treats as "no extension known".
    }
    return l;
}

Expand Down Expand Up @@ -202,15 +217,43 @@ Token lexer_next(Lexer *l)
lexer_chop_char(l, 1);
token.text_len += 1;
}

if (l->file_path.items == NULL)
return token;

const char* file_ext;
const char* filename = l->file_path.items;
const char *dot = strrchr(filename, '.');
if(!dot || dot == filename)
file_ext = "";
else
file_ext = dot + 1;

for (size_t i = 0; i < keywords_count; ++i) {
size_t keyword_len = strlen(keywords[i]);
if (keyword_len == token.text_len && memcmp(keywords[i], token.text, keyword_len) == 0) {
token.kind = TOKEN_KEYWORD;
break;
if (strcmp(file_ext, "java") == 0) {
for (size_t i = 0; i < jKeywords_count; ++i) {
size_t keyword_len = strlen(jKeywords[i]);
if (keyword_len == token.text_len && memcmp(jKeywords[i], token.text, keyword_len) == 0) {
token.kind = TOKEN_KEYWORD;
break;
}
}
} else if (strcmp(file_ext, "py") == 0) {
for (size_t i = 0; i < pyKeywords_count; ++i) {
size_t keyword_len = strlen(pyKeywords[i]);
if (keyword_len == token.text_len && memcmp(pyKeywords[i], token.text, keyword_len) == 0) {
token.kind = TOKEN_KEYWORD;
break;
}
}
} else {
for (size_t i = 0; i < cKeywords_count; ++i) {
size_t keyword_len = strlen(cKeywords[i]);
if (keyword_len == token.text_len && memcmp(cKeywords[i], token.text, keyword_len) == 0) {
token.kind = TOKEN_KEYWORD;
break;
}
}
}

return token;
}

Expand Down
4 changes: 3 additions & 1 deletion src/lexer.h
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
#include <stddef.h>
#include "./la.h"
#include "./free_glyph.h"
#include "./common.h"

typedef enum {
TOKEN_END = 0,
Expand Down Expand Up @@ -37,9 +38,10 @@ typedef struct {
size_t line;
size_t bol;
float x;
String_Builder file_path;
} Lexer;

Lexer lexer_new(Free_Glyph_Atlas *atlas, const char *content, size_t content_len);
Lexer lexer_new(Free_Glyph_Atlas *atlas, const char *content, size_t content_len, String_Builder file_path);
Token lexer_next(Lexer *l);

#endif // LEXER_H_