Skip to content

Commit dbde972

Browse files
committed
add lines to tokens
1 parent 25487cc commit dbde972

File tree

2 files changed

+43
-30
lines changed

2 files changed

+43
-30
lines changed

src/compiler/lexer.ts

Lines changed: 41 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -59,31 +59,32 @@ function isInt(src: string) {
5959
* @param watchMode Whether it is watch mode or not. Errors will throw an error instead of exiting if this value is set to `true`.
6060
* @returns A list of tokens generated from source string.
6161
* @author efekos
62-
* @version 1.0.0
62+
* @version 1.0.1
6363
* @since 0.0.1-alpha
6464
*/
6565
export function tokenizeSyx(source: string, watchMode: boolean): Token[] {
6666
const tokens: Token[] = [];
6767
const src = source.split('');
6868
let curPos = -1;
69+
let curLine = 0;
6970

7071
while (src.length > 0) {
7172
if (!isSkippable(src[0])) log.debug(`Parsing token: '${src[0]}'`);
72-
if (src[0] === '(') tokens.push({ type: TokenType.OpenParen, value: src.shift(), pos:++curPos, end:curPos });
73-
else if (src[0] === ')') tokens.push({ type: TokenType.CloseParen, value: src.shift(), pos:++curPos, end: curPos });
74-
else if (src[0] === '{') tokens.push({ type: TokenType.OpenBrace, value: src.shift() ,pos:++curPos,end:curPos});
75-
else if (src[0] === '}') tokens.push({ type: TokenType.CloseBrace, value: src.shift() ,pos:++curPos,end:curPos});
76-
else if (src[0] === '[') tokens.push({ type: TokenType.OpenSquare, value: src.shift() ,pos:++curPos,end:curPos});
77-
else if (src[0] === ']') tokens.push({ type: TokenType.CloseSquare, value: src.shift() ,pos:++curPos,end:curPos});
78-
else if (src[0] === ',') tokens.push({ type: TokenType.Comma, value: src.shift() ,pos:++curPos,end:curPos});
79-
else if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift() ,pos:++curPos,end:curPos});
80-
else if (src[0] === '<') tokens.push({ type: TokenType.OpenDiamond, value: src.shift() ,pos:++curPos,end:curPos});
81-
else if (src[0] === '>') tokens.push({ type: TokenType.CloseDiamond, value: src.shift() ,pos:++curPos,end:curPos});
82-
else if (src[0] === '\'') tokens.push({ type: TokenType.SingleQuote, value: src.shift() ,pos:++curPos,end:curPos});
83-
else if (src[0] === '"') tokens.push({ type: TokenType.DoubleQuote, value: src.shift() ,pos:++curPos,end:curPos});
84-
else if (src[0] === '|') tokens.push({ type: TokenType.VarSeperator, value: src.shift() ,pos:++curPos,end:curPos});
73+
if (src[0] === '(') tokens.push({ type: TokenType.OpenParen, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
74+
else if (src[0] === ')') tokens.push({ type: TokenType.CloseParen, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
75+
else if (src[0] === '{') tokens.push({ type: TokenType.OpenBrace, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
76+
else if (src[0] === '}') tokens.push({ type: TokenType.CloseBrace, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
77+
else if (src[0] === '[') tokens.push({ type: TokenType.OpenSquare, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
78+
else if (src[0] === ']') tokens.push({ type: TokenType.CloseSquare, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
79+
else if (src[0] === ',') tokens.push({ type: TokenType.Comma, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
80+
else if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
81+
else if (src[0] === '<') tokens.push({ type: TokenType.OpenDiamond, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
82+
else if (src[0] === '>') tokens.push({ type: TokenType.CloseDiamond, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
83+
else if (src[0] === '\'') tokens.push({ type: TokenType.SingleQuote, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
84+
else if (src[0] === '"') tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
85+
else if (src[0] === '|') tokens.push({ type: TokenType.VarSeperator, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
8586
else if (src[0] === '+' && chars.includes(src[1])) {
86-
if (src[1] === 's') tokens.push({ type: TokenType.WhitespaceIdentifier, value: '+s' ,pos:++curPos,end:curPos+1});
87+
if (src[1] === 's') tokens.push({ type: TokenType.WhitespaceIdentifier, value: '+s', pos: ++curPos, end: curPos + 1, line: curLine });
8788
else (watchMode ? log.thrower : log.exit).error(`${chalk.gray(curPos)} Unexpected identifier: '${src[1]}'`);
8889
src.shift(); src.shift();
8990
} else if (isInt(src[0])) {
@@ -95,7 +96,7 @@ export function tokenizeSyx(source: string, watchMode: boolean): Token[] {
9596
curPos++;
9697
}
9798

98-
tokens.push({ type: TokenType.IntNumber, value: ident, pos:startPos,end: curPos });
99+
tokens.push({ type: TokenType.IntNumber, value: ident, pos: startPos, end: curPos, line: curLine });
99100
} else if (isAlphabetic(src[0])) {
100101
log.debug('Found identifier');
101102
let ident = '';
@@ -107,12 +108,17 @@ export function tokenizeSyx(source: string, watchMode: boolean): Token[] {
107108

108109
const reserved = keywords[ident];
109110
if (reserved !== undefined) log.debug(`Found keyword: '${reserved}'`);
110-
tokens.push({ type: reserved ?? TokenType.Identifier, value: ident,pos:startPos,end:curPos });
111-
} else if (isSkippable(src[0])) { log.debug('Found skippable char'); src.shift(); curPos++; }
112-
else tokens.push({ type: TokenType.Raw, value: src.shift(),pos:++curPos,end:curPos });
111+
tokens.push({ type: reserved ?? TokenType.Identifier, value: ident, pos: startPos, end: curPos, line: curLine });
112+
} else if (isSkippable(src[0])) {
113+
log.debug('Found skippable char');
114+
src.shift();
115+
curPos++;
116+
if (src[0] === '\n') curLine++;
117+
}
118+
else tokens.push({ type: TokenType.Raw, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
113119
}
114120

115-
tokens.push({ type: TokenType.EndOfFile, value: 'EOF',pos:source.length,end:source.length });
121+
tokens.push({ type: TokenType.EndOfFile, value: 'EOF', pos: source.length, end: source.length, line: curLine });
116122
return tokens;
117123
}
118124

@@ -121,20 +127,21 @@ export function tokenizeSyx(source: string, watchMode: boolean): Token[] {
121127
* @param source Source string.
122128
* @returns A list of tokens generated from the source file.
123129
* @author efekos
124-
* @version 1.0.0
130+
* @version 1.0.1
125131
* @since 0.0.1-alpha
126132
*/
127133
export function tokenizeSys(source: string): Token[] {
128134
const src = source.split('');
129135
const tokens: Token[] = [];
130-
136+
131137
let curPos = -1;
138+
let curLine = 0;
132139

133140
while (src.length > 0 && `${src[0]}${src[1]}${src[2]}` !== ':::') {
134141
if (!isSkippable(src[0])) log.debug(`Parsing tokenmm: '${src[0]}'`);
135-
if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift(), pos:++curPos, end:curPos });
136-
else if (src[0] === '\'') tokens.push({ type: TokenType.SingleQuote, value: src.shift(), pos:++curPos, end:curPos });
137-
else if (src[0] === '"') tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), pos:++curPos, end:curPos });
142+
if (src[0] === ';') tokens.push({ type: TokenType.Semicolon, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
143+
else if (src[0] === '\'') tokens.push({ type: TokenType.SingleQuote, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
144+
else if (src[0] === '"') tokens.push({ type: TokenType.DoubleQuote, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
138145
else if (isAlphabetic(src[0])) {
139146
log.debug('Found identifier');
140147
let ident = '';
@@ -146,12 +153,17 @@ export function tokenizeSys(source: string): Token[] {
146153

147154
const reserved = keywords[ident];
148155
if (reserved !== undefined) log.debug(`Found keyword: '${reserved}'`);
149-
tokens.push({ type: reserved ?? TokenType.Identifier, value: ident, pos:startPost, end:curPos });
150-
} else if (isSkippable(src[0])) { log.debug('Found skippable char'); src.shift(); curPos++; }
151-
else tokens.push({ type: TokenType.Raw, value: src.shift(), pos:++curPos,end:curPos });
156+
tokens.push({ type: reserved ?? TokenType.Identifier, value: ident, pos: startPost, end: curPos, line: curLine });
157+
} else if (isSkippable(src[0])) {
158+
log.debug('Found skippable char');
159+
src.shift();
160+
curPos++;
161+
if(src[0]==='\n') curLine++;
162+
}
163+
else tokens.push({ type: TokenType.Raw, value: src.shift(), pos: ++curPos, end: curPos, line: curLine });
152164

153165
}
154166

155-
tokens.push({ type: TokenType.EndOfFile, value: 'eof', pos:++curPos,end:curPos });
167+
tokens.push({ type: TokenType.EndOfFile, value: 'eof', pos: ++curPos, end: curPos, line: curLine });
156168
return tokens;
157169
}

src/compiler/types.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ export enum TokenType {
3939

4040
/**
4141
* Base token interface.
42-
* @version 1.0.1
42+
* @version 1.0.2
4343
* @since 0.0.1-alpha
4444
* @author efekos
4545
*/
@@ -48,6 +48,7 @@ export interface Token {
4848
value: string;
4949
pos:number;
5050
end:number;
51+
line:number;
5152
}
5253

5354
/**

0 commit comments

Comments
 (0)