Skip to content

Commit

Permalink
LSP: Rewrite LexedProgram and ParseProgram traversal to use the P…
Browse files Browse the repository at this point in the history
…arse trait. (FuelLabs#4220)

## Description
This work was started in FuelLabs#3772. I've decided to split that up into two
PRs; this one implements a new `Parse` trait for the lexed and parsed
ASTs. The second, follow-up PR will do the same but for the typed AST.

The diff is quite big, but it essentially just moves the logic that
was already there into the trait impls.

Partially addresses FuelLabs#3799
  • Loading branch information
JoshuaBatty authored Mar 7, 2023
1 parent f1fcec7 commit 0fbdb3b
Show file tree
Hide file tree
Showing 10 changed files with 1,188 additions and 1,103 deletions.
2 changes: 1 addition & 1 deletion sway-core/src/language/parsed/declaration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ mod r#struct;
mod r#trait;
mod variable;

pub(crate) use abi::*;
pub use abi::*;
pub use constant::*;
pub use function::*;
pub use impl_trait::*;
Expand Down
5 changes: 1 addition & 4 deletions sway-lsp/src/capabilities/completion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,10 +55,7 @@ fn is_initial_declaration(token_type: &Token) -> bool {
)
}
None => {
matches!(
token_type.parsed,
AstToken::Declaration(_) | AstToken::FunctionDeclaration(_)
)
matches!(token_type.parsed, AstToken::Declaration(_))
}
}
}
43 changes: 26 additions & 17 deletions sway-lsp/src/core/session.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,7 @@ use crate::{
},
error::{DocumentError, LanguageServerError},
traverse::{
dependency::Dependency, lexed_tree::LexedTree, parsed_tree::ParsedTree,
typed_tree::TypedTree,
dependency, lexed_tree, parsed_tree::ParsedTree, typed_tree::TypedTree, ParseContext,
},
};
use dashmap::DashMap;
Expand Down Expand Up @@ -171,25 +170,24 @@ impl Session {

let ast_res = CompileResult::new(typed, warnings, errors);
let typed_program = self.compile_res_to_typed_program(&ast_res)?;
let ctx = ParseContext::new(&self.token_map, engines);

// The final element in the results is the main program.
if i == results_len - 1 {
// First, populate our token_map with sway keywords.
let lexed_tree = LexedTree::new(&self.token_map);
lexed_tree.parse(&lexed);
lexed_tree::parse(&lexed, &ctx);

// Next, populate our token_map with un-typed yet parsed ast nodes.
let parsed_tree = ParsedTree::new(type_engine, &self.token_map);
let parsed_tree = ParsedTree::new(&ctx);
parsed_tree.collect_module_spans(&parsed);
self.parse_ast_to_tokens(&parsed, |an| parsed_tree.traverse_node(an));
self.parse_ast_to_tokens(&parsed, &ctx, |an, _ctx| parsed_tree.traverse_node(an));

// Finally, create runnables and populate our token_map with typed ast nodes.
self.create_runnables(typed_program);

let typed_tree =
TypedTree::new(engines, &self.token_map, &typed_program.root.namespace);
let typed_tree = TypedTree::new(&ctx, &typed_program.root.namespace);
typed_tree.collect_module_spans(typed_program);
self.parse_ast_to_typed_tokens(typed_program, |node| {
self.parse_ast_to_typed_tokens(typed_program, &ctx, |node, _ctx| {
typed_tree.traverse_node(node)
});

Expand All @@ -200,11 +198,12 @@ impl Session {
diagnostics = get_diagnostics(&ast_res.warnings, &ast_res.errors);
} else {
// Collect tokens from dependencies and the standard library prelude.
let dependency = Dependency::new(&self.token_map);
self.parse_ast_to_tokens(&parsed, |an| dependency.collect_parsed_declaration(an));
self.parse_ast_to_tokens(&parsed, &ctx, |an, ctx| {
dependency::collect_parsed_declaration(an, ctx)
});

self.parse_ast_to_typed_tokens(typed_program, |node| {
dependency.collect_typed_declaration(node)
self.parse_ast_to_typed_tokens(typed_program, &ctx, |node, ctx| {
dependency::collect_typed_declaration(node, ctx)
});
}
}
Expand Down Expand Up @@ -351,27 +350,37 @@ impl Session {
}

/// Parse the [ParseProgram] AST to populate the [TokenMap] with parsed AST nodes.
fn parse_ast_to_tokens(&self, parse_program: &ParseProgram, f: impl Fn(&AstNode)) {
fn parse_ast_to_tokens(
&self,
parse_program: &ParseProgram,
ctx: &ParseContext,
f: impl Fn(&AstNode, &ParseContext),
) {
let root_nodes = parse_program.root.tree.root_nodes.iter();
let sub_nodes = parse_program
.root
.submodules
.iter()
.flat_map(|(_, submodule)| &submodule.module.tree.root_nodes);

root_nodes.chain(sub_nodes).for_each(f);
root_nodes.chain(sub_nodes).for_each(|n| f(n, ctx));
}

/// Parse the [ty::TyProgram] AST to populate the [TokenMap] with typed AST nodes.
fn parse_ast_to_typed_tokens(&self, typed_program: &ty::TyProgram, f: impl Fn(&ty::TyAstNode)) {
fn parse_ast_to_typed_tokens(
&self,
typed_program: &ty::TyProgram,
ctx: &ParseContext,
f: impl Fn(&ty::TyAstNode, &ParseContext),
) {
let root_nodes = typed_program.root.all_nodes.iter();
let sub_nodes = typed_program
.root
.submodules
.iter()
.flat_map(|(_, submodule)| submodule.module.all_nodes.iter());

root_nodes.chain(sub_nodes).for_each(f);
root_nodes.chain(sub_nodes).for_each(|n| f(n, ctx));
}

/// Get a reference to the [ty::TyProgram] AST.
Expand Down
16 changes: 11 additions & 5 deletions sway-lsp/src/core/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,11 @@ use sway_core::{
decl_engine::DeclEngine,
language::{
parsed::{
ConstantDeclaration, Declaration, EnumVariant, Expression, FunctionDeclaration,
FunctionParameter, Scrutinee, StorageField, StructExpressionField, StructField,
StructScrutineeField, Supertrait, TraitFn, UseStatement,
AbiCastExpression, AmbiguousPathExpression, Declaration, DelineatedPathExpression,
EnumVariant, Expression, FunctionApplicationExpression, FunctionParameter,
MethodApplicationExpression, Scrutinee, StorageField, StructExpression,
StructExpressionField, StructField, StructScrutineeField, Supertrait, TraitFn,
UseStatement,
},
ty,
},
Expand All @@ -24,15 +26,19 @@ use tower_lsp::lsp_types::{Position, Range};
pub enum AstToken {
Declaration(Declaration),
Expression(Expression),
StructExpression(StructExpression),
StructExpressionField(StructExpressionField),
StructScrutineeField(StructScrutineeField),
FunctionDeclaration(FunctionDeclaration),
FunctionParameter(FunctionParameter),
FunctionApplicationExpression(FunctionApplicationExpression),
MethodApplicationExpression(MethodApplicationExpression),
AmbiguousPathExpression(AmbiguousPathExpression),
DelineatedPathExpression(DelineatedPathExpression),
AbiCastExpression(AbiCastExpression),
StructField(StructField),
EnumVariant(EnumVariant),
TraitFn(TraitFn),
TraitConstraint(TraitConstraint),
ConstantDeclaration(ConstantDeclaration),
StorageField(StorageField),
Scrutinee(Scrutinee),
Keyword(Ident),
Expand Down
88 changes: 39 additions & 49 deletions sway-lsp/src/traverse/dependency.rs
Original file line number Diff line number Diff line change
@@ -1,63 +1,53 @@
use crate::core::{
token::{self, AstToken, SymbolKind, Token, TypeDefinition, TypedAstToken},
token_map::TokenMap,
use crate::{
core::token::{self, AstToken, SymbolKind, Token, TypeDefinition, TypedAstToken},
traverse::ParseContext,
};
use sway_core::language::{
parsed::{AstNode, AstNodeContent, Declaration},
ty,
};

pub struct Dependency<'a> {
tokens: &'a TokenMap,
}

impl<'a> Dependency<'a> {
pub fn new(tokens: &'a TokenMap) -> Self {
Self { tokens }
}

/// Insert Declaration tokens into the TokenMap.
pub fn collect_parsed_declaration(&self, node: &AstNode) {
if let AstNodeContent::Declaration(declaration) = &node.content {
let parsed_token = AstToken::Declaration(declaration.clone());
/// Insert Declaration tokens into the TokenMap.
pub fn collect_parsed_declaration(node: &AstNode, ctx: &ParseContext) {
if let AstNodeContent::Declaration(declaration) = &node.content {
let parsed_token = AstToken::Declaration(declaration.clone());

let (ident, symbol_kind) = match declaration {
Declaration::VariableDeclaration(variable) => {
(variable.name.clone(), SymbolKind::Variable)
}
Declaration::StructDeclaration(decl) => (decl.name.clone(), SymbolKind::Struct),
Declaration::TraitDeclaration(decl) => (decl.name.clone(), SymbolKind::Trait),
Declaration::FunctionDeclaration(decl) => (decl.name.clone(), SymbolKind::Function),
Declaration::ConstantDeclaration(decl) => (decl.name.clone(), SymbolKind::Const),
Declaration::EnumDeclaration(decl) => (decl.name.clone(), SymbolKind::Enum),
_ => return,
};
let (ident, symbol_kind) = match declaration {
Declaration::VariableDeclaration(variable) => {
(variable.name.clone(), SymbolKind::Variable)
}
Declaration::StructDeclaration(decl) => (decl.name.clone(), SymbolKind::Struct),
Declaration::TraitDeclaration(decl) => (decl.name.clone(), SymbolKind::Trait),
Declaration::FunctionDeclaration(decl) => (decl.name.clone(), SymbolKind::Function),
Declaration::ConstantDeclaration(decl) => (decl.name.clone(), SymbolKind::Const),
Declaration::EnumDeclaration(decl) => (decl.name.clone(), SymbolKind::Enum),
_ => return,
};

let key = token::to_ident_key(&ident);
let token = Token::from_parsed(parsed_token, symbol_kind);
self.tokens.insert(key, token);
}
let key = token::to_ident_key(&ident);
let token = Token::from_parsed(parsed_token, symbol_kind);
ctx.tokens.insert(key, token);
}
}

/// Insert TypedDeclaration tokens into the TokenMap.
pub fn collect_typed_declaration(&self, node: &ty::TyAstNode) {
if let ty::TyAstNodeContent::Declaration(declaration) = &node.content {
let typed_token = TypedAstToken::TypedDeclaration(declaration.clone());
/// Insert TypedDeclaration tokens into the TokenMap.
pub fn collect_typed_declaration(node: &ty::TyAstNode, ctx: &ParseContext) {
if let ty::TyAstNodeContent::Declaration(declaration) = &node.content {
let typed_token = TypedAstToken::TypedDeclaration(declaration.clone());

let ident = match declaration {
ty::TyDeclaration::VariableDeclaration(variable) => variable.name.clone(),
ty::TyDeclaration::StructDeclaration(decl_ref) => decl_ref.name.clone(),
ty::TyDeclaration::EnumDeclaration(decl_ref) => decl_ref.name.clone(),
ty::TyDeclaration::TraitDeclaration { name, .. }
| ty::TyDeclaration::FunctionDeclaration { name, .. }
| ty::TyDeclaration::ConstantDeclaration { name, .. } => name.clone(),
_ => return,
};
let ident = token::to_ident_key(&ident);
if let Some(mut token) = self.tokens.try_get_mut(&ident).try_unwrap() {
token.typed = Some(typed_token);
token.type_def = Some(TypeDefinition::Ident(ident.0));
}
let ident = match declaration {
ty::TyDeclaration::VariableDeclaration(variable) => variable.name.clone(),
ty::TyDeclaration::StructDeclaration(decl_ref) => decl_ref.name.clone(),
ty::TyDeclaration::EnumDeclaration(decl_ref) => decl_ref.name.clone(),
ty::TyDeclaration::TraitDeclaration { name, .. }
| ty::TyDeclaration::FunctionDeclaration { name, .. }
| ty::TyDeclaration::ConstantDeclaration { name, .. } => name.clone(),
_ => return,
};
let ident = token::to_ident_key(&ident);
if let Some(mut token) = ctx.tokens.try_get_mut(&ident).try_unwrap() {
token.typed = Some(typed_token);
token.type_def = Some(TypeDefinition::Ident(ident.0));
}
}
}
Loading

0 comments on commit 0fbdb3b

Please sign in to comment.