This repository has been archived by the owner on Aug 24, 2024. It is now read-only.

Commit

chore: update lean-toolchain
insightmind committed Mar 21, 2022
1 parent 0488054 · commit cd75f8b
Showing 5 changed files with 58 additions and 20 deletions.
LeanInk/Analysis/DataTypes.lean (2 changes: 1 addition & 1 deletion)

@@ -17,7 +17,7 @@ class Positional (α : Type u) where
   tailPos : α -> String.Pos
 
 namespace Positional
-def length { α : Type u } [Positional α] (self : α) : Nat := (Positional.tailPos self) - (Positional.headPos self)
+def length { α : Type u } [Positional α] (self : α) : String.Pos := (Positional.tailPos self) - (Positional.headPos self)
 
 def smallest? { α : Type u } [Positional α] (list : List α) : Option α := List.foldl (λ a y =>
   let y : α := y -- We need to help the compiler a bit here otherwise it thinks `y : Option α`
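The one substantive change in this file is the return type of `Positional.length`, which moves from `Nat` to `String.Pos`. Below is a minimal sketch of the new shape, assuming only what the changed line itself requires (that this toolchain can subtract two `String.Pos` values); the primed names are illustrative and not part of LeanInk:

    universe u

    -- Illustrative mirror of the `Positional` class above; `Positional'` is not LeanInk's name.
    class Positional' (α : Type u) where
      headPos : α → String.Pos
      tailPos : α → String.Pos

    -- As in the new line above, the length is itself a `String.Pos` (a byte offset),
    -- not a bare `Nat`; the subtraction stays within `String.Pos`.
    def Positional'.length {α : Type u} [Positional' α] (self : α) : String.Pos :=
      Positional'.tailPos self - Positional'.headPos self

Keeping the result as a `String.Pos` means callers keep working in byte offsets rather than dropping down to plain numbers.
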
LeanInk/Analysis/SemanticToken.lean (2 changes: 1 addition & 1 deletion)

@@ -48,7 +48,7 @@ namespace SemanticTraversalInfo
 
 def highlightKeyword (headPos tailPos : String.Pos) (stx: Syntax) : AnalysisM (List Token) := do
   if let Syntax.atom info val := stx then
-    if (val.length > 0 && val[0].isAlpha) || (val.length > 1 && val[0] = '#' && val[1].isAlpha) then
+    if (val.length > 0 && val[0].isAlpha) || (val.length > 1 && val[0] = '#' && val[1].isAlpha) then
       return genSemanticToken stx SemanticTokenType.keyword
   return []
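The condition in `highlightKeyword` accepts an atom either when it starts with a letter (plain keywords such as `def`) or when it is a `#`-command such as `#check` or `#eval`. A rough standalone sketch of that predicate, written with `String.front` and `String.drop` instead of the indexing used in the file; `looksLikeKeyword` is a hypothetical name, not LeanInk API:

    -- True for atoms that look like keywords: `def`, `theorem`, ..., or `#check`, `#eval`, ...
    def looksLikeKeyword (val : String) : Bool :=
      (val.length > 0 && val.front.isAlpha) ||
        (val.length > 1 && val.front == '#' && (val.drop 1).front.isAlpha)

    #eval looksLikeKeyword "def"     -- true
    #eval looksLikeKeyword "#check"  -- true
    #eval looksLikeKeyword ":="      -- false
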
LeanInk/Annotation/Alectryon.lean (6 changes: 3 additions & 3 deletions)

@@ -151,7 +151,7 @@ def minPos (x y : String.Pos) := if x < y then x else y
 def maxPos (x y : String.Pos) := if x > y then x else y
 
 partial def genTokens (contents : String) (head : String.Pos) (offset : String.Pos) (l : List Token) (compounds : List (Compound Analysis.Token)) : AnalysisM (List Token) := do
-  let textTail := contents.utf8ByteSize + offset
+  let textTail := contents.utf8ByteSize + offset
   let mut head : String.Pos := head
   let mut tokens : List Token := []
   for x in compounds do
@@ -171,7 +171,7 @@ partial def genTokens (contents : String) (head : String.Pos) (offset : String.P
     match text with
     | none => logInfo s!"Empty 1 {text} {x.headPos} {tail}"
     | some text => tokens := { raw := text }::tokens
-  match extractContents offset contents head (contents.utf8ByteSize + offset) with
+  match extractContents offset contents head (contents.utf8ByteSize + offset) with
   | none => return tokens.reverse
   | some x => return ({ raw := x }::tokens).reverse
 
@@ -233,7 +233,7 @@ Generates AlectryonFragments for the given CompoundFragments and input file cont
 def annotateFileWithCompounds (l : List Alectryon.Fragment) (contents : String) : List Annotation -> AnalysisM (List Fragment)
   | [] => pure l
   | x::[] => do
-    let fragment ← genFragment x contents.utf8ByteSize (contents.extract x.sentence.headPos contents.utf8ByteSize)
+    let fragment ← genFragment x contents.utf8ByteSize (contents.extract x.sentence.headPos contents.utf8ByteSize)
    return l.append [fragment]
   | x::y::ys => do
    let fragment ← genFragment x y.sentence.headPos (contents.extract x.sentence.headPos (y.sentence.headPos))
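Both `genTokens` hunks sit inside the same accumulator pattern visible in the context lines above: tokens are prepended to a mutable list in the loop and the list is reversed once at the end. A small self-contained illustration of that pattern (`collectSquares` and its `Nat` payload are invented for the example):

    -- Prepend while iterating, reverse once at the end: each step is O(1),
    -- instead of appending to the tail on every iteration.
    def collectSquares (xs : List Nat) : List Nat := Id.run do
      let mut acc : List Nat := []
      for x in xs do
        acc := (x * x) :: acc
      return acc.reverse

    #eval collectSquares [1, 2, 3]  -- [1, 4, 9]
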
lean-toolchain (2 changes: 1 addition & 1 deletion)

@@ -1 +1 @@
-leanprover/lean4:nightly-2022-03-07
+leanprover/lean4:nightly-2022-03-21
test/bench/Basic.lean.leanInk.expected (66 changes: 52 additions & 14 deletions)

@@ -3637,7 +3637,7 @@
 "docstring": null,
 "_type": "token"},
 {"typeinfo":
-{"type": "{α : Type ?u.2688} → [self : BEq α] → α → α → Bool",
+{"type": "{α : Type ?u.2646} → [self : BEq α] → α → α → Bool",
 "name": "BEq.beq",
 "_type": "typeinfo"},
 "semanticType": null,
@@ -20864,11 +20864,23 @@
 "_type": "token"},
 {"typeinfo": null,
 "semanticType": null,
-"raw": ", _, ",
+"raw": ", ",
 "link": null,
 "docstring": null,
 "_type": "token"},
-{"typeinfo": {"type": "0 < x✝", "name": "h", "_type": "typeinfo"},
+{"typeinfo": {"type": "Nat", "name": "_", "_type": "typeinfo"},
+"semanticType": null,
+"raw": "_",
+"link": null,
+"docstring": null,
+"_type": "token"},
+{"typeinfo": null,
+"semanticType": null,
+"raw": ", ",
+"link": null,
+"docstring": null,
+"_type": "token"},
+{"typeinfo": {"type": "0 < ?m.13614", "name": "h", "_type": "typeinfo"},
 "semanticType": "Name.Variable",
 "raw": "h",
 "link": null,
@@ -21046,7 +21058,7 @@
 "docstring": null,
 "_type": "token"},
 {"typeinfo": {"type": "a < b", "name": "this", "_type": "typeinfo"},
-"semanticType": "Name.Variable",
+"semanticType": "Keyword",
 "raw": "this",
 "link": null,
 "docstring": null,
@@ -22734,7 +22746,7 @@
 "_type": "token"},
 {"typeinfo":
 {"type": "Exists fun k => n + k = m", "name": "this", "_type": "typeinfo"},
-"semanticType": "Name.Variable",
+"semanticType": "Keyword",
 "raw": "this",
 "link": null,
 "docstring": null,
@@ -22769,7 +22781,7 @@
 "link": null,
 "docstring": null,
 "_type": "token"},
-{"typeinfo": {"type": "succ n ≤ succ m", "name": "h", "_type": "typeinfo"},
+{"typeinfo": {"type": "n + k = m", "name": "h", "_type": "typeinfo"},
 "semanticType": "Name.Variable",
 "raw": "h",
 "link": null,
@@ -31340,7 +31352,7 @@
 "link": null,
 "docstring": null,
 "_type": "token"},
-{"typeinfo": {"type": "m * n = k * m", "name": "h", "_type": "typeinfo"},
+{"typeinfo": {"type": "n * m = k * m", "name": "h", "_type": "typeinfo"},
 "semanticType": "Name.Variable",
 "raw": "h",
 "link": null,
@@ -32437,7 +32449,7 @@
 "link": null,
 "docstring": null,
 "_type": "token"},
-{"typeinfo": {"type": "i ≤ j", "name": "h", "_type": "typeinfo"},
+{"typeinfo": {"type": "i ≤ succ j", "name": "h", "_type": "typeinfo"},
 "semanticType": "Name.Variable",
 "raw": "h",
 "link": null,
@@ -32724,7 +32736,7 @@
 "_type": "token"},
 {"typeinfo":
 {"type": "n ^ i * 1 ≤ n ^ j * n", "name": "this", "_type": "typeinfo"},
-"semanticType": "Name.Variable",
+"semanticType": "Keyword",
 "raw": "this",
 "link": null,
 "docstring": null,
@@ -35295,7 +35307,7 @@
 "link": null,
 "docstring": null,
 "_type": "token"},
-{"typeinfo": {"type": "i < succ a", "name": "h", "_type": "typeinfo"},
+{"typeinfo": {"type": "succ i = succ a", "name": "h", "_type": "typeinfo"},
 "semanticType": "Name.Variable",
 "raw": "h",
 "link": null,
@@ -35693,7 +35705,7 @@
 "link": null,
 "docstring": null,
 "_type": "token"},
-{"typeinfo": {"type": "succ i < succ a", "name": "h", "_type": "typeinfo"},
+{"typeinfo": {"type": "i < succ a", "name": "h", "_type": "typeinfo"},
 "semanticType": "Name.Variable",
 "raw": "h",
 "link": null,
@@ -41670,7 +41682,20 @@
 "_type": "token"},
 {"typeinfo": null,
 "semanticType": null,
-"raw": "\n | _, ",
+"raw": "\n | ",
 "link": null,
 "docstring": null,
 "_type": "token"},
+{"typeinfo":
+{"type": "Exists fun k => a - b + k = c", "name": "_", "_type": "typeinfo"},
+"semanticType": null,
+"raw": "_",
+"link": null,
+"docstring": null,
+"_type": "token"},
+{"typeinfo": null,
+"semanticType": null,
+"raw": ", ",
+"link": null,
+"docstring": null,
+"_type": "token"},
@@ -42142,7 +42167,7 @@
 "link": null,
 "docstring": null,
 "_type": "token"},
-{"typeinfo": {"type": "d + (a - b) = c", "name": "hd", "_type": "typeinfo"},
+{"typeinfo": {"type": "a - b + d = c", "name": "hd", "_type": "typeinfo"},
 "semanticType": "Name.Variable",
 "raw": "hd",
 "link": null,
@@ -43716,7 +43741,20 @@
 "_type": "token"},
 {"typeinfo": null,
 "semanticType": null,
-"raw": "\n | _, ",
+"raw": "\n | ",
 "link": null,
 "docstring": null,
 "_type": "token"},
+{"typeinfo":
+{"type": "Exists fun k => a + k = c + b", "name": "_", "_type": "typeinfo"},
+"semanticType": null,
+"raw": "_",
+"link": null,
+"docstring": null,
+"_type": "token"},
+{"typeinfo": null,
+"semanticType": null,
+"raw": ", ",
+"link": null,
+"docstring": null,
+"_type": "token"},
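Most of the churn in the updated expected output is mechanical: metavariable indices shift (`?u.2688` becomes `?u.2646`, and `0 < x✝` is now printed as `0 < ?m.13614`), anonymous `_` patterns in match alternatives get their own tokens carrying type information, and occurrences of `this` are classified with the `Keyword` semantic type instead of `Name.Variable`. That last change lines up with how `this` is used in Lean: it is the reserved name bound by an anonymous `have`, as in this toy example (unrelated to the benchmark file):

    example (n : Nat) : n + 0 = n := by
      have : n + 0 = n := Nat.add_zero n
      -- `this` refers to the anonymous `have` above; in the new expected output
      -- such occurrences carry the `Keyword` semantic type.
      exact this
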
