Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add support for inline variable declaration #991

Draft
wants to merge 3 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 36 additions & 0 deletions compiler/plc_ast/src/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -628,6 +628,8 @@ pub enum AstStatement {
ReturnStatement(ReturnStatement),
JumpStatement(JumpStatement),
LabelStatement(LabelStatement),
InlineVariable(InlineVariable),
DataTypeDeclaration(Box<DataTypeDeclaration>),
}

impl Debug for AstNode {
Expand Down Expand Up @@ -747,6 +749,12 @@ impl Debug for AstNode {
AstStatement::LabelStatement(LabelStatement { name, .. }) => {
f.debug_struct("LabelStatement").field("name", name).finish()
}
AstStatement::InlineVariable(InlineVariable { name, datatype }) => {
f.debug_struct("InlineVariable").field("name", name).field("datatype", datatype).finish()
}
AstStatement::DataTypeDeclaration(decl) => {
f.debug_tuple("DataTypeDeclaration").field(decl).finish()
}
}
}
}
Expand Down Expand Up @@ -1519,6 +1527,27 @@ impl AstFactory {
/// Creates a label statement node (a named jump target) with the given `name`,
/// attaching the supplied `location` and `id` to the resulting node.
pub fn create_label_statement(name: String, location: SourceLocation, id: AstId) -> AstNode {
AstNode { stmt: AstStatement::LabelStatement(LabelStatement { name }), location, id }
}

/// Builds an `AstStatement::InlineVariable` node from a variable `name`
/// expression and an optional `datatype` expression. Both payloads are boxed
/// before being stored in the statement.
pub fn create_inline_declaration(
    name: AstNode,
    datatype: Option<AstNode>,
    id: AstId,
    location: SourceLocation,
) -> AstNode {
    AstNode {
        stmt: AstStatement::InlineVariable(InlineVariable {
            name: Box::new(name),
            datatype: datatype.map(Box::new),
        }),
        id,
        location,
    }
}

/// Wraps a [`DataTypeDeclaration`] in an `AstStatement::DataTypeDeclaration`
/// node, boxing the declaration and attaching the given `id` and `location`.
pub fn create_type_declaration(
    datatype: DataTypeDeclaration,
    id: AstId,
    location: SourceLocation,
) -> AstNode {
    AstNode { stmt: AstStatement::DataTypeDeclaration(Box::new(datatype)), id, location }
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct EmptyStatement {}
Expand Down Expand Up @@ -1601,3 +1630,10 @@ pub struct JumpStatement {
pub struct LabelStatement {
pub name: String,
}

/// Represents a new variable declaration in the body
#[derive(Clone, Debug, PartialEq)]
pub struct InlineVariable {
/// Expression that names the declared variable
pub name: Box<AstNode>,
/// Optional datatype expression of the declaration; `None` when no type was given
pub datatype: Option<Box<AstNode>>,
}
3 changes: 2 additions & 1 deletion src/index.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ use self::{

pub mod const_expressions;
mod instance_iterator;
pub mod scoped_index;
pub mod symbol;
#[cfg(test)]
mod tests;
Expand Down Expand Up @@ -734,7 +735,7 @@ impl PouIndexEntry {
/// the TypeIndex carries all types.
/// it is extracted into its separate struct so it can be
/// internally borrowed individually from the other maps
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct TypeIndex {
/// all types (structs, enums, type, POUs, etc.)
types: SymbolMap<String, DataType>,
Expand Down
64 changes: 64 additions & 0 deletions src/index/scoped_index.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
use std::{collections::HashMap, rc::Rc};

use plc_ast::provider::IdProvider;
use plc_source::source_location::SourceLocation;

use crate::typesystem::DataType;

use super::VariableIndexEntry;

/// A minimal index implementation that can be used for local scopes
#[derive(Debug, Clone)]
pub struct ScopedIndex {
/// The scope of the current index, this is usually a POU
scope: String,

/// A unique identifier that new variables in this scope will inherit
suffix_provider: IdProvider,

/// The location that caused this scope to be created
start_location: SourceLocation,

/// New variables defined by this index
variables: HashMap<String, VariableIndexEntry>,

/// Datatypes defined by this index
type_index: HashMap<String, DataType>,

/// The enclosing scope, if any; `None` for a top-level scope
parent: Option<Rc<ScopedIndex>>,
}

impl ScopedIndex {
/// Folds this scope's variables and types into `target`, consuming `self`.
/// Entries whose names already exist in `target` are overwritten
/// (standard `HashMap::extend` semantics).
pub fn merge_into(self, target: &mut Self) {
target.variables.extend(self.variables);
target.type_index.extend(self.type_index);
}

/// Registers a variable in this scope.
/// NOTE(review): stub — body is intentionally empty, not yet implemented.
pub fn add_variable(&mut self, _name: &str) {}

/// Registers a datatype in this scope.
/// NOTE(review): stub — body is intentionally empty, not yet implemented.
pub fn add_type(&mut self, _name: &str) {}

/// Looks up a variable by name in this scope.
/// NOTE(review): stub — always panics via `todo!()`; parent-scope fallback
/// (via `self.parent`) is presumably intended but not yet written.
pub fn find_variable(&self, _name: &str) -> Option<&VariableIndexEntry> {
todo!()
}

/// Looks up a datatype by name in this scope.
/// NOTE(review): stub — always panics via `todo!()`.
pub fn find_type(&self, _name: &str) -> Option<&DataType> {
todo!()
}

/// Creates a new, empty scope named `scope`, optionally nested inside
/// `parent`, recording `location` as the point that opened the scope.
pub fn new(
parent: Option<Rc<ScopedIndex>>,
location: SourceLocation,
scope: &str,
suffix_provider: IdProvider,
) -> ScopedIndex {
ScopedIndex {
scope: scope.to_string(),
suffix_provider,
start_location: location,
parent,
type_index: Default::default(),
variables: Default::default(),
}
}
}
10 changes: 10 additions & 0 deletions src/index/symbol.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,16 @@ impl<K, V> Default for SymbolMap<K, V> {
}
}

/// Manual `Clone` implementation so a `SymbolMap` is clonable whenever its
/// keys and values are. This matches what `#[derive(Clone)]` on the struct
/// would generate (same `K: Clone, V: Clone` bounds, field-wise clone).
impl<K, V> Clone for SymbolMap<K, V>
where
K: Clone,
V: Clone,
{
fn clone(&self) -> Self {
Self { inner_map: self.inner_map.clone() }
}
}

impl<K, V> SymbolMap<K, V>
where
K: Hash + Eq,
Expand Down
4 changes: 3 additions & 1 deletion src/lexer/tests/lexer_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,14 +58,16 @@ fn undefined_pragmas_are_ignored_by_the_lexer() {
#[test]
fn registered_pragmas_parsed() {
let mut lexer = lex(r"
{external}{ref}{sized}{not_registerd}
{external}{ref}{sized}{def}{not_registerd}
");
assert_eq!(lexer.token, PropertyExternal, "Token : {}", lexer.slice());
lexer.advance();
assert_eq!(lexer.token, PropertyByRef, "Token : {}", lexer.slice());
lexer.advance();
assert_eq!(lexer.token, PropertySized, "Token : {}", lexer.slice());
lexer.advance();
assert_eq!(lexer.token, PropertyDef, "Token : {}", lexer.slice());
lexer.advance();
assert_eq!(lexer.token, End);
}

Expand Down
3 changes: 3 additions & 0 deletions src/lexer/tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@ pub enum Token {
#[token("{sized}")]
PropertySized,

#[token("{def}")]
PropertyDef,

#[token("PROGRAM", ignore(case))]
KeywordProgram,

Expand Down
110 changes: 47 additions & 63 deletions src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -342,7 +342,7 @@ fn parse_super_class(lexer: &mut ParseSession) -> Option<String> {
fn parse_return_type(lexer: &mut ParseSession, pou_type: &PouType) -> Option<DataTypeDeclaration> {
let start_return_type = lexer.range().start;
if lexer.try_consume(&KeywordColon) {
if let Some((declaration, initializer)) = parse_data_type_definition(lexer, None) {
if let Some((declaration, initializer)) = parse_datatype_with_initializer(lexer, None) {
if let Some(init) = initializer {
lexer.accept_diagnostic(
Diagnostic::warning(
Expand Down Expand Up @@ -612,7 +612,7 @@ fn parse_full_data_type_definition(
None,
))
} else {
parse_data_type_definition(lexer, name).map(|(type_def, initializer)| {
parse_datatype_with_initializer(lexer, name).map(|(type_def, initializer)| {
if lexer.try_consume(&KeywordDotDotDot) {
(
DataTypeDeclaration::DataTypeDefinition {
Expand All @@ -630,23 +630,29 @@ fn parse_full_data_type_definition(
})
}

// TYPE xxx : 'STRUCT' | '(' | IDENTIFIER
fn parse_data_type_definition(
fn parse_datatype_with_initializer(
lexer: &mut ParseSession,
name: Option<String>,
) -> Option<DataTypeWithInitializer> {
parse_data_type_definition(lexer, name).map(|type_def| {
let initializer =
if lexer.try_consume(&KeywordAssignment) { Some(parse_expression(lexer)) } else { None };

(type_def, initializer)
})
}

// TYPE xxx : 'STRUCT' | '(' | IDENTIFIER
fn parse_data_type_definition(lexer: &mut ParseSession, name: Option<String>) -> Option<DataTypeDeclaration> {
let start = lexer.location();
if lexer.try_consume(&KeywordStruct) {
// Parse struct
let variables = parse_variable_list(lexer);
Some((
DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::StructType { name, variables },
location: start.span(&lexer.location()),
scope: lexer.scope.clone(),
},
None,
))
Some(DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::StructType { name, variables },
location: start.span(&lexer.location()),
scope: lexer.scope.clone(),
})
} else if lexer.try_consume(&KeywordArray) {
parse_array_type_definition(lexer, name)
} else if lexer.try_consume(&KeywordPointer) {
Expand Down Expand Up @@ -687,23 +693,18 @@ fn parse_pointer_definition(
lexer: &mut ParseSession,
name: Option<String>,
start_pos: usize,
) -> Option<(DataTypeDeclaration, Option<AstNode>)> {
parse_data_type_definition(lexer, None).map(|(decl, initializer)| {
(
DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::PointerType { name, referenced_type: Box::new(decl) },
location: lexer.source_range_factory.create_range(start_pos..lexer.last_range.end),
scope: lexer.scope.clone(),
},
initializer,
)
) -> Option<DataTypeDeclaration> {
parse_data_type_definition(lexer, None).map(|decl| DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::PointerType { name, referenced_type: Box::new(decl) },
location: lexer.source_range_factory.create_range(start_pos..lexer.last_range.end),
scope: lexer.scope.clone(),
})
}

fn parse_type_reference_type_definition(
lexer: &mut ParseSession,
name: Option<String>,
) -> Option<(DataTypeDeclaration, Option<AstNode>)> {
) -> Option<DataTypeDeclaration> {
let start = lexer.range().start;
//Subrange
let referenced_type = lexer.slice_and_advance();
Expand All @@ -718,9 +719,6 @@ fn parse_type_reference_type_definition(
None
};

let initial_value =
if lexer.try_consume(&KeywordAssignment) { Some(parse_expression(lexer)) } else { None };

let end = lexer.last_range.end;
if name.is_some() || bounds.is_some() {
let data_type = match bounds {
Expand Down Expand Up @@ -758,15 +756,12 @@ fn parse_type_reference_type_definition(
scope: lexer.scope.clone(),
},
};
Some((data_type, initial_value))
Some(data_type)
} else {
Some((
DataTypeDeclaration::DataTypeReference {
referenced_type,
location: lexer.source_range_factory.create_range(start..end),
},
initial_value,
))
Some(DataTypeDeclaration::DataTypeReference {
referenced_type,
location: lexer.source_range_factory.create_range(start..end),
})
}
}

Expand Down Expand Up @@ -805,7 +800,7 @@ fn parse_string_size_expression(lexer: &mut ParseSession) -> Option<AstNode> {
fn parse_string_type_definition(
lexer: &mut ParseSession,
name: Option<String>,
) -> Option<(DataTypeDeclaration, Option<AstNode>)> {
) -> Option<DataTypeDeclaration> {
let text = lexer.slice().to_string();
let start = lexer.range().start;
let is_wide = lexer.token == KeywordWideString;
Expand All @@ -832,34 +827,26 @@ fn parse_string_type_definition(
}),
_ => Some(DataTypeDeclaration::DataTypeReference { referenced_type: text, location }),
}
.zip(Some(lexer.try_consume(&KeywordAssignment).then(|| parse_expression(lexer))))
}

fn parse_enum_type_definition(
lexer: &mut ParseSession,
name: Option<String>,
) -> Option<(DataTypeDeclaration, Option<AstNode>)> {
fn parse_enum_type_definition(lexer: &mut ParseSession, name: Option<String>) -> Option<DataTypeDeclaration> {
let start = lexer.last_location();
let elements = parse_any_in_region(lexer, vec![KeywordParensClose], |lexer| {
// Parse Enum - we expect at least one element
let elements = parse_expression_list(lexer);
Some(elements)
})?;
let initializer = lexer.try_consume(&KeywordAssignment).then(|| parse_expression(lexer));
Some((
DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::EnumType { name, elements, numeric_type: DINT_TYPE.to_string() },
location: start.span(&lexer.last_location()),
scope: lexer.scope.clone(),
},
initializer,
))
Some(DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::EnumType { name, elements, numeric_type: DINT_TYPE.to_string() },
location: start.span(&lexer.last_location()),
scope: lexer.scope.clone(),
})
}

fn parse_array_type_definition(
lexer: &mut ParseSession,
name: Option<String>,
) -> Option<(DataTypeDeclaration, Option<AstNode>)> {
) -> Option<DataTypeDeclaration> {
let start = lexer.last_range.start;
let range = parse_any_in_region(lexer, vec![KeywordOf], |lexer| {
// Parse Array range
Expand All @@ -876,7 +863,7 @@ fn parse_array_type_definition(
})?;

let inner_type_defintion = parse_data_type_definition(lexer, None);
inner_type_defintion.map(|(reference, initializer)| {
inner_type_defintion.map(|reference| {
let reference_end = reference.get_location().to_range().map(|it| it.end).unwrap_or(0);
let location = lexer.source_range_factory.create_range(start..reference_end);

Expand Down Expand Up @@ -907,19 +894,16 @@ fn parse_array_type_definition(
}
};

(
DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::ArrayType {
name,
bounds: range,
referenced_type: Box::new(reference),
is_variable_length,
},
location,
scope: lexer.scope.clone(),
DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::ArrayType {
name,
bounds: range,
referenced_type: Box::new(reference),
is_variable_length,
},
initializer,
)
location,
scope: lexer.scope.clone(),
}
})
}

Expand Down
Loading
Loading