Skip to content

Commit

Permalink
Now searches for unclosed quotes + suggestions are closed by default
Browse files Browse the repository at this point in the history
  • Loading branch information
elijah-potter committed Jan 14, 2024
1 parent 6121f57 commit db0d1a9
Show file tree
Hide file tree
Showing 14 changed files with 153 additions and 73 deletions.
43 changes: 40 additions & 3 deletions lt-core/src/document.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,48 @@ pub struct Document {
}

impl Document {
/// Lexes and parses `text` to produce a [`Document`].
///
/// After lexing, quotation marks are paired up via `match_quotes` so that
/// each `Quote` token's `twin_loc` points at its partner (best effort).
pub fn new(text: &str) -> Self {
    let source: Vec<_> = text.chars().collect();
    let tokens = lex_to_end(&source);

    let mut doc = Self { source, tokens };
    doc.match_quotes();

    doc
}

/// Yields the token index of every quotation-mark token, in document order.
fn iter_quote_indices(&self) -> impl Iterator<Item = usize> + '_ {
    self.tokens
        .iter()
        .enumerate()
        .filter(|(_, token)| {
            matches!(token.kind, TokenKind::Punctuation(Punctuation::Quote(_)))
        })
        .map(|(index, _)| index)
}

/// Searches for quotation marks and fills the [`Punctuation::Quote::twin_loc`] field.
/// This is on a best effort basis.
///
/// Current algorithm is very basic and could use some work.
fn match_quotes(&mut self) {
    // Pair quotes in order of appearance: 1st with 2nd, 3rd with 4th, and so
    // on. `chunks_exact(2)` silently drops a trailing unpaired quote, leaving
    // its `twin_loc` as `None` — exactly what the unclosed-quote lint detects.
    let locations: Vec<usize> = self.iter_quote_indices().collect();

    for pair in locations.chunks_exact(2) {
        let (open_loc, close_loc) = (pair[0], pair[1]);

        self.tokens[open_loc].kind.as_mut_quote().unwrap().twin_loc = Some(close_loc);
        self.tokens[close_loc].kind.as_mut_quote().unwrap().twin_loc = Some(open_loc);
    }
}

pub fn tokens(&self) -> impl Iterator<Item = Token> + '_ {
Expand All @@ -32,7 +69,7 @@ impl Document {
self.tokens().map(|token| token.to_fat(&self.source))
}

/// Create a list of the sentence terminators in the document.
/// Iterate over the locations of the sentence terminators in the document.
fn sentence_terminators(&self) -> impl Iterator<Item = usize> + '_ {
self.tokens.iter().enumerate().filter_map(|(index, token)| {
if let Token {
Expand All @@ -55,7 +92,7 @@ impl Document {
let first_sentence = self
.sentence_terminators()
.next()
.map(|first_term| &self.tokens[0..=first_term]);
.map(|first_term| &self.tokens[1..=first_term]);

let rest = self
.sentence_terminators()
Expand Down
2 changes: 2 additions & 0 deletions lt-core/src/linting/lint.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,14 @@ pub struct Lint {
pub span: Span,
pub lint_kind: LintKind,
pub suggestions: Vec<Suggestion>,
pub message: String,
}

#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum LintKind {
Spelling,
Capitalization,
UnmatchedQuote,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
Expand Down
4 changes: 3 additions & 1 deletion lt-core/src/linting/mod.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
mod lint;
mod sentence_capitalization;
mod spell_check;
mod unclosed_quotes;

pub use lint::{Lint, LintKind, Suggestion};

Expand All @@ -11,9 +12,10 @@ use self::lint::Linter;
pub fn all_linters(document: &Document) -> Vec<Lint> {
let mut lints = Vec::new();

let linters: [Linter; 2] = [
let linters: [Linter; 3] = [
spell_check::spell_check,
sentence_capitalization::sentence_capitalization_lint,
unclosed_quotes::unclosed_quotes,
];

for linter in linters {
Expand Down
1 change: 1 addition & 0 deletions lt-core/src/linting/sentence_capitalization.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ pub fn sentence_capitalization_lint(document: &Document) -> Vec<Lint> {
suggestions: vec![Suggestion::ReplaceWith(
first_letter.to_uppercase().collect_vec(),
)],
message: "This sentance does not start with a capital letter".to_string(),
})
}
}
Expand Down
1 change: 1 addition & 0 deletions lt-core/src/linting/spell_check.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ pub fn spell_check(document: &Document) -> Vec<Lint> {
span: word.span,
lint_kind: LintKind::Spelling,
suggestions: suggestions.collect(),
message: "Did you mean to spell it this way?".to_string(),
})
}

Expand Down
19 changes: 19 additions & 0 deletions lt-core/src/linting/unclosed_quotes.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
use crate::{document::Document, parsing::Quote, Lint, LintKind, Punctuation, TokenKind};

/// Produces a lint for every quote token whose `twin_loc` was never filled
/// in by `Document::match_quotes`, i.e. every quote without a partner.
pub fn unclosed_quotes(document: &Document) -> Vec<Lint> {
    // TODO: Try zipping quote positions
    document
        .tokens()
        .filter(|token| {
            matches!(
                token.kind,
                TokenKind::Punctuation(Punctuation::Quote(Quote { twin_loc: None }))
            )
        })
        .map(|token| Lint {
            span: token.span,
            lint_kind: LintKind::UnmatchedQuote,
            suggestions: vec![],
            message: "This quote has no termination.".to_string(),
        })
        .collect()
}
23 changes: 21 additions & 2 deletions lt-core/src/parsing/lexer.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
use crate::span::Span;

use super::token::{Punctuation, Token, TokenKind};
use super::{
token::{Punctuation, Token, TokenKind},
Quote,
};

#[derive(Debug)]
pub struct FoundToken {
Expand Down Expand Up @@ -152,6 +155,10 @@ fn lex_characters(source: &[char], cs: &str, token: TokenKind) -> Option<FoundTo
macro_rules! lex_punctuation {
($($text:literal => $res:ident),*) => {
fn lex_punctuation(source: &[char]) -> Option<FoundToken> {
if let Some(found) = lex_quote(source){
return Some(found);
}

$(
if let Some(found) = lex_characters(source, $text, TokenKind::Punctuation(Punctuation::$res)){
return Some(found);
Expand All @@ -169,7 +176,6 @@ lex_punctuation! {
"?" => Question,
":" => Colon,
";" => Semicolon,
"\"" => Quote,
"," => Comma,
"-" => Hyphen,
"[" => OpenSquare,
Expand All @@ -179,6 +185,19 @@ lex_punctuation! {
"#" => Hash
}

/// Lexes a single quotation mark — straight (`"`) or typographic (`“` / `”`) —
/// from the front of `source`, producing a `Quote` token with no twin yet.
fn lex_quote(source: &[char]) -> Option<FoundToken> {
    match *source.first()? {
        '\"' | '“' | '”' => Some(FoundToken {
            next_index: 1,
            token: TokenKind::Punctuation(Punctuation::Quote(Quote { twin_loc: None })),
        }),
        _ => None,
    }
}

#[cfg(test)]
mod tests {
use crate::{
Expand Down
3 changes: 1 addition & 2 deletions lt-core/src/parsing/mod.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
mod lexer;

mod token;

pub use lexer::{lex_to_end, lex_to_end_str};
pub use token::{FatToken, Punctuation, Token, TokenKind, TokenStringExt};
pub use token::{FatToken, Punctuation, Quote, Token, TokenKind, TokenStringExt};
18 changes: 17 additions & 1 deletion lt-core/src/parsing/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,16 @@ pub enum TokenKind {
Newline(usize),
}

impl TokenKind {
    /// Mutable access to the inner [`Quote`], if this token is a quote;
    /// otherwise `None`.
    pub fn as_mut_quote(&mut self) -> Option<&mut Quote> {
        let punctuation = self.as_mut_punctuation()?;
        punctuation.as_mut_quote()
    }

    /// Shared access to the inner [`Quote`], if this token is a quote;
    /// otherwise `None`.
    pub fn as_quote(&self) -> Option<&Quote> {
        let punctuation = self.as_punctuation()?;
        punctuation.as_quote()
    }
}

#[derive(Debug, Is, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
#[serde(tag = "kind")]
pub enum Punctuation {
Expand All @@ -54,7 +64,7 @@ pub enum Punctuation {
/// ;
Semicolon,
/// "
Quote,
Quote(Quote),
/// ,
Comma,
/// -
Expand All @@ -71,6 +81,12 @@ pub enum Punctuation {
Hash,
}

/// Pairing state carried by a quotation-mark token.
/// `twin_loc` is populated (best effort) by quote matching on the document;
/// a quote whose twin is `None` is flagged by the unclosed-quote lint.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct Quote {
    /// The location of the matching quote, if it exists.
    pub twin_loc: Option<usize>,
}

pub trait TokenStringExt {
fn first_word(&self) -> Option<Token>;
}
Expand Down
18 changes: 18 additions & 0 deletions web/src/app.css
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,21 @@
src: url('/fonts/outfit.ttf');
}

/* Host element for the animated lint underline: made relative so the
   ::after pseudo-element below can be absolutely positioned under it. */
.underlinespecial {
position: relative;
}

/* The underline itself, drawn as a full-width bar just below the text.
   Color and thickness are injected per-lint via the --bg-color and
   --line-width custom properties; transitions animate thickness changes
   (e.g. when a lint gains focus). */
.underlinespecial:after {
transition-property: all;
transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);
transition-duration: 150ms;
content: '';
display: block;
width: 100%;
height: var(--line-width);
border-radius: 1000px;
background: var(--bg-color);
position: absolute;
bottom: -3px;
left: 0;
}
40 changes: 0 additions & 40 deletions web/src/lib/Highlights.svelte

This file was deleted.

13 changes: 8 additions & 5 deletions web/src/lib/Underlines.svelte
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
<script lang="ts">
import type { Lint, Token, TokenKind } from '$lib/analysis';
import { contentToString, lintText, spanContent } from '$lib/analysis';
import type { Lint } from '$lib/analysis';
import { lintText, spanContent } from '$lib/analysis';
export let content: string;
export let focusLintIndex: number | undefined;
Expand Down Expand Up @@ -53,7 +53,7 @@
let lintContent = [
spanContent(lint.span, content).replaceAll(' ', '\u00A0'),
'red',
lintIndex === focusLintIndex ? '3px' : '1px'
lintIndex === focusLintIndex ? '4px' : '2px'
];
return [...prevContent, lintContent];
Expand All @@ -80,15 +80,18 @@
</script>

<div class="grid">
<div class="p-0 m-0 indent-0" style="grid-row: 1; grid-column: 1; color: transparent;">
<div class="p-0 m-0 indent-0 text-transparent" style="grid-row: 1; grid-column: 1">
{#each modified as chunk}
{#if chunk == null}
<br />
{:else if typeof chunk == 'string'}
<span class="">{chunk}</span>
{:else}
<span style={`margin-right: -4px;`}>
<span class="transition-all" style={`border-bottom: ${chunk[2]} solid ${chunk[1]};`}>
<span
class="underlinespecial"
style={`--bg-color: ${chunk[1]}; --line-width: ${chunk[2]};`}
>
{chunk[0]}
</span>
</span>
Expand Down
1 change: 1 addition & 0 deletions web/src/lib/analysis.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ export interface Lint {
span: Span;
lint_kind: 'Capitalization' | 'Spelling';
suggestions: Suggestion[];
message: string;
}

export interface Suggestion {
Expand Down
Loading

0 comments on commit db0d1a9

Please sign in to comment.