28 changes: 28 additions & 0 deletions example.jai
@@ -151,6 +151,30 @@ big_boy_parsing :: ()

d = ["string a", "string b"]
c = "why not another string"

//
// If an @ is followed by a single character, it will be interpreted as a string, with that character acting as
// the delimiter instead of the usual "" chars. This is useful when your string contains many characters
// that you would otherwise have to escape with a backslash. This kind of syntax is analogous to the
// #string directive; check 005_strings.jai in the how_to's if you want to know more. You can put a backslash
// in front of your delimiter to include it inside your string.
//
e = @#"Wow! This is going to be so useful!" - You, the user, for the \#69105 time.#

//
// If you start a new line right after declaring your delimiter, that newline will not be included in the string by default!
//
f = @#
Something amorphous and consummate
existed before Heaven and Earth.
Solitude! Vast!
Standing alone, unaltering.
Going everywhere, yet unthreatened.
It can be considered the Mother of the World.
I don't know its name, so I designate it, “Tao”.
Compelled to consider it, name it “the Great”.
#

test_enum = VALUE_C // You can put enum names directly; the name will be matched and the corresponding value filled in

test_enum_flags = THESE | ARE | FLAGS | 0xF000 // You can combine the flags of an enum_flags value with '|'
@@ -210,6 +234,8 @@ big_boy_unparsing :: ()
test.b = -123.123;
test.c = "a string";
test.d = .["a", "b", "c", "easy as", "one", "two", "three"];
test.e = "Yet another normal string";
test.f = "Nothing fancy happens with here strings when unparsing!";
test.variant = 0xFFAA;
test.array_of_structs = .[ .{}, .{}, .{data_1 = 123, data_4 = 100, some_strings = .["some", "strings"]}];
test.one_of_these.data_1 = 11111;
@@ -253,6 +279,8 @@ Big_Boy_Test :: struct
b2 := 1.0;
c := "just a string";
d : [] string;
f := "here be another string";
e := "watch me get much fancier soon";
variant : A_Testing_Variant = xx 1122334455;
array_of_structs : [] Some_More_Data;
one_of_these : Some_More_Data;
24 changes: 23 additions & 1 deletion tokenizer.jai
@@ -203,6 +203,29 @@ tokenize_one_token :: (tokenizer : *Tokenizer, tokens_to_ignore : [] Token.Type
result_token.text.count -= 2;
result_token.text.data += 1;
}
else if get(tokenizer) == #char "@"
{
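// @-delimited string: the single character right after '@' acts as the delimiter instead of the usual "".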
advance();
delimiter := get(tokenizer);
advance();

offset := 0;
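// A newline (LF or CRLF) right after the delimiter is skipped so it isn't included in the string.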
if get(tokenizer) == #char "\n" then offset = 1;
else if get(tokenizer) == #char "\r" && peek(tokenizer) == #char "\n" then offset = 2; // Windows, why you playin'
for 0..offset-1 advance();

while true
{
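// Scan for the closing delimiter; a backslash escapes the character that follows it, so an escaped delimiter doesn't end the string.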
if get(tokenizer) == #char "\\" then advance();
else if get(tokenizer) == delimiter then break;
advance();
}
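// Consume the closing delimiter.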
advance();

found_token(String);
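// Trim the leading '@' and opening delimiter (plus the skipped newline, if any) and the trailing delimiter from the token text.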
result_token.text.count -= (3 + offset);
result_token.text.data += (2 + offset);
}
else if is_operator(tokenizer, "(") { advance(); found_token(Open_Parentheses); }
else if is_operator(tokenizer, ")") { advance(); found_token(Close_Parentheses); }
else if is_operator(tokenizer, "{") { advance(); found_token(Open_Braces); }
@@ -213,7 +236,6 @@ tokenize_one_token :: (tokenizer : *Tokenizer, tokens_to_ignore : [] Token.Type
else if is_operator(tokenizer, ">") { advance(); found_token(More_Than); }
else if is_operator(tokenizer, "?") { advance(); found_token(Interrogation); }
else if is_operator(tokenizer, "!") { advance(); found_token(Exclamation); }
else if is_operator(tokenizer, "@") { advance(); found_token(At_Sign); }
else if is_operator(tokenizer, "=") { advance(); found_token(Equals); }
else if is_operator(tokenizer, "+") { advance(); found_token(Plus); }
else if is_operator(tokenizer, "-") { advance(); found_token(Minus); }