Pretty errors; refactoring
ivanjermakov committed Jun 5, 2023
1 parent 83500dd commit fd45c0c
Showing 8 changed files with 96 additions and 31 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -2,3 +2,4 @@
node_modules
dist
pnpm-lock.yaml
data/test.no
23 changes: 23 additions & 0 deletions src/error.ts
@@ -0,0 +1,23 @@
import { TokenName } from './parser/parser'
import { indexToLocation, LocationRange, prettyIndex, prettyLocation } from './location'
import { Source } from './source'

export interface SyntaxErrorInfo {
    expect: TokenName[],
    got: TokenName,
    location: LocationRange
}

export const prettySyntaxError = (error: SyntaxErrorInfo): string =>
    `syntax error: expected \`${error.expect}\`, got \`${error.got}\``

export const prettySourceMessage = (message: string, index: number, source: Source): string => {
    const location = indexToLocation(index, source)
    const locationStr = `${location ? `${source.filename}:${prettyLocation(location)}` : '<unknown location>'}`
    const indent = ' '.repeat(4)
    return `\
${prettyIndex(index, source)}
${message}
${indent}at ${locationStr}
`
}
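
A minimal usage sketch of the two new helpers, assuming it sits next to them in src/; the source text, the error values, and the premise that TokenName covers lexer token names such as 'equals' are illustrative assumptions, not part of this commit:

import { prettySourceMessage, prettySyntaxError, SyntaxErrorInfo } from './error'
import { Source } from './source'

// Hypothetical input and error; only token names visible in this diff are used,
// assuming TokenName includes the lexer token names.
const source: Source = { str: 'type Vec3\n', filename: 'data/test.no' }
const error: SyntaxErrorInfo = {
    expect: ['equals'],
    got: 'kind-keyword',
    location: { start: 5, end: 8 }
}

// Prints the highlighted line, the message, and file:line:column, roughly:
// 1 | type Vec3
//          ^
// syntax error: expected `equals`, got `kind-keyword`
//     at data/test.no:1:6
console.error(prettySourceMessage(prettySyntaxError(error), error.location.start, source))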
12 changes: 7 additions & 5 deletions src/index.ts
@@ -1,8 +1,8 @@
import { compactToken, flattenToken, parse } from './parser/parser'
import { tokenize } from './lexer/lexer'
import { inspect } from 'util'
import { readFileSync } from 'fs'
import { join, resolve } from 'path'
import { prettySourceMessage, prettySyntaxError } from './error'

const version = JSON.parse(readFileSync(join(__dirname, '..', 'package.json')).toString()).version

@@ -17,14 +17,16 @@ if (!path) {
process.exit()
}

const code = readFileSync(resolve(path)).toString()
const source = { str: readFileSync(resolve(path)).toString(), filename: path }

const token = parse(tokenize(code))
const token = parse(tokenize(source.str))
if (token === true) {
throw Error('parsing error: skipped root')
console.error('parsing error: skipped root')
process.exit(1)
}
if ('expect' in token) {
throw Error(`parsing error: ${inspect(token, { depth: null, colors: true })}`)
console.error(prettySourceMessage(prettySyntaxError(token), token.location.start, source))
process.exit(1)
}

console.dir(compactToken(flattenToken(token)), { depth: null, colors: true, compact: true })
19 changes: 7 additions & 12 deletions src/lexer/lexer.ts
@@ -1,3 +1,5 @@
import { LocationRange } from '../location'

export const lexerTokenNames = <const>[
// keywords
'type-keyword',
@@ -54,15 +56,9 @@ export type LexerTokenName = typeof lexerTokenNames[number]
export interface LexerToken {
name: LexerTokenName
value: string
location: TokenLocation
}

export interface TokenLocation {
start: number
end: number
location: LocationRange
}


export const constTokenMap: Map<LexerTokenName, string> = new Map([
['type-keyword', 'type'],
['kind-keyword', 'kind'],
@@ -101,6 +97,10 @@ export const constTokenMap: Map<LexerTokenName, string> = new Map([
['equals', '='],
])

export const isWhitespace = (char: string): boolean => char === ' ' || char === '\t'

export const isNewline = (char: string): boolean => char === '\n' || char === '\r'

export const tokenize = (code: String): LexerToken[] => {
const pos = { pos: 0 }
const chars = code.split('')
@@ -248,11 +248,6 @@ const createToken = (
return { name, value, location: { start, end: pos.pos - 1 } }
}

const isWhitespace = (char: string): boolean => char === ' ' || char === '\t'

const isNewline = (char: string): boolean => char === '\n' || char === '\r'


const isAlpha = (char: string): boolean =>
(char >= 'A' && char <= 'Z') ||
(char >= 'a' && char <= 'z') ||
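
A short sketch of how the lexer's relocated LocationRange is meant to be consumed downstream, assuming the file lives in src/; the input string is hypothetical and the exact token stream depends on the grammar:

import { tokenize } from './lexer/lexer'
import { indexToLocation, prettyLocation } from './location'
import { Source } from './source'

const source: Source = { str: 'type Vec3\n', filename: 'example.no' }

// Each LexerToken now carries the shared LocationRange, so its start index can be
// translated to a line:column pair with the helpers from src/location.ts.
for (const token of tokenize(source.str)) {
    const loc = indexToLocation(token.location.start, source)
    console.log(`${token.name} '${token.value}' at ${loc ? prettyLocation(loc) : '?'}`)
}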
44 changes: 44 additions & 0 deletions src/location.ts
@@ -0,0 +1,44 @@
import { isNewline } from './lexer/lexer'
import { Source } from './source'

export interface LocationRange {
    start: number
    end: number
}

export interface Location {
    line: number,
    column: number
}

export const indexToLocation = (index: number, source: Source): Location | undefined => {
    let line = 0
    let column = 0
    for (let i = 0; i <= index; i++) {
        if (i === index) {
            return { line, column }
        }
        if (isNewline(source.str[i])) {
            line++
            column = 0
        } else {
            column++
        }
    }
    return undefined
}

export const prettyIndex = (index: number, source: Source): string => {
    const start = indexToLocation(index, source)
    if (!start) return '<outside of a file>'

    const lines = source.str.split('\n')
    const line = lines[start.line]
    const highlight = ' '.repeat(start.column) + '^'
    const lineNum = `${start.line + 1} | `
    return `\
${lineNum}${line}
${' '.repeat(lineNum.length)}${highlight}`
}

export const prettyLocation = (location: Location): string => `${location.line + 1}:${location.column + 1}`
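
A quick sketch exercising the conversion helpers on a hypothetical two-line source (the sample text and the expected values below are worked out from the code above, not taken from the commit):

import { indexToLocation, prettyIndex, prettyLocation } from './location'

// Hypothetical two-line source; index 12 points at the 'B' on the second line.
const source = { str: 'type A\ntype B\n', filename: 'example.no' }

console.log(indexToLocation(12, source))            // { line: 1, column: 5 } (zero-based)
console.log(prettyLocation({ line: 1, column: 5 })) // '2:6' (one-based)

// prettyIndex renders the line containing the index with a caret under it:
// 2 | type B
//          ^
console.log(prettyIndex(12, source))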
11 changes: 6 additions & 5 deletions src/parser/parser.spec.ts
@@ -1,17 +1,18 @@
import { tokenize } from '../lexer/lexer'
import { expect } from '@jest/globals'
import { compactToken, flattenToken, parse, ParserTokenName, prettySyntaxError, Token } from './parser'
import { compactToken, flattenToken, parse, ParserTokenName, Token } from './parser'
import { prettySyntaxError } from '../error'

describe('parser', () => {

const parseToken = (code: string, root: ParserTokenName = 'program'): Token => {
const tokens = tokenize(code)
const parseToken = (source: string, root: ParserTokenName = 'program'): Token => {
const tokens = tokenize(source)
const token = parse(tokens, root)
if (token === true) {
throw Error('parsing error: skipped root')
throw Error('skipped root')
}
if ('expect' in token) {
throw Error(`parsing error: ${prettySyntaxError(token)}`)
throw Error(prettySyntaxError(token))
}
return flattenToken(token)
}
13 changes: 4 additions & 9 deletions src/parser/parser.ts
@@ -1,6 +1,8 @@
import { LexerToken, LexerTokenName, TokenLocation } from '../lexer/lexer'
import { LexerToken, LexerTokenName } from '../lexer/lexer'
import { readFileSync } from 'fs'
import { join } from 'path'
import { SyntaxErrorInfo } from '../error'
import { LocationRange } from '../location'

export const parserTokenNames = <const>[
'program',
@@ -29,7 +31,7 @@ export type ParserTokenName = typeof parserTokenNames[number]

export interface ParserToken {
name: ParserTokenName,
location: TokenLocation,
location: LocationRange,
nodes: Token[]
}

@@ -48,12 +50,6 @@ export interface Rule {

export type ParseBranch = TokenName[]

export interface SyntaxErrorInfo {
expect: TokenName[],
got: TokenName,
location: TokenLocation
}

const rawRules = JSON.parse(readFileSync(join(__dirname, '..', 'grammar.json')).toString()).rules
export const rules: Map<ParserTokenName, Rule> = new Map(rawRules.map((r: Rule) => [r.name, r]))

@@ -149,4 +145,3 @@ export const compactToken = (token: Token): any => {
}
}

export const prettySyntaxError = (error: SyntaxErrorInfo): string => `Expected ${error.expect}, got: ${error.got} at ${error.location}`
4 changes: 4 additions & 0 deletions src/source.ts
@@ -0,0 +1,4 @@
export interface Source {
    str: string
    filename: string
}
