better completions
mustafaquraish committed Apr 26, 2024
1 parent 006ef7e commit e599657
Showing 5 changed files with 85 additions and 54 deletions.
6 changes: 5 additions & 1 deletion compiler/lsp/finder.oc
@@ -241,7 +241,11 @@ def Finder::find_in_expression(&this, node: &AST): bool {
return .set_usage(node.resolved_symbol, node)
}
}

Error => {
if node.span.contains_loc(.loc) {
return .set_usage(node.resolved_symbol, node)
}
}
else => {
if verbose println("Unhandled node type in Finder::find_in_expression: %s", node.type)
}
65 changes: 39 additions & 26 deletions compiler/lsp/utils.oc
@@ -374,7 +374,10 @@ def gen_namespace_json(ns: &Namespace): &Value {
return obj
}

def gen_completion_item(sym: &Symbol): &Value {
def insert_completion_item(completions: &Value, sym: &Symbol, seen: &Set<str>) {
if sym.display in seen return
seen += sym.display

let val = Value::new(Dictionary)
val["label"] = sym.name
val["detail"] = gen_hover_string(sym)
@@ -407,7 +410,7 @@ def gen_completion_item(sym: &Symbol): &Value {
}
}
}
return val
completions += val
}

def get_unique_reference_spans(sym: &Symbol, for_rename: bool): &Vector<Span> {
@@ -482,46 +485,53 @@ def gen_renames_json(sym: &Symbol, loc: Location): &Value {
return obj
}

def gen_completions_from_scope(scope: &Scope, completions: &Value) {
def gen_completions_from_scope(scope: &Scope, completions: &Value, hint_type: &Type, seen: &Set<str>) {
if not scope? return
for item in scope.items.iter_values() {
completions += gen_completion_item(item)
let item_type = get_symbol_typedef(item)
if hint_type? and not item_type.eq(hint_type) continue

if hint_type? and hint_type.base == Enum {
// Suggest names of enum variants
let enom = hint_type.u.enum_
for field in enom.fields.iter() {
insert_completion_item(completions, field.sym, seen)
}
}
insert_completion_item(completions, item, seen)
}
gen_completions_from_scope(scope.parent, completions)
gen_completions_from_scope(scope.parent, completions, hint_type, seen)
}

def gen_completion_items_from_ns(ns: &Namespace, completions: &Value) {
let seen = Set<str>::new()
defer seen.free()

def gen_completion_items_from_ns(completions: &Value, ns: &Namespace, seen: &Set<str>) {
for it : ns.namespaces.iter_values() {
seen += it.sym.name
completions += gen_completion_item(it.sym)
insert_completion_item(completions, it.sym, seen)
}
for it : ns.enums.iter() {
seen += it.sym.name
completions += gen_completion_item(it.sym)
insert_completion_item(completions, it.sym, seen)
}
for it : ns.structs.iter() {
seen += it.sym.name
completions += gen_completion_item(it.sym)
insert_completion_item(completions, it.sym, seen)
}
for it : ns.variables.iter() {
if it.resolved_symbol? {
seen += it.resolved_symbol.name
completions += gen_completion_item(it.resolved_symbol)
insert_completion_item(completions, it.resolved_symbol, seen)
}
}
for it : ns.constants.iter() {
if it.resolved_symbol? {
seen += it.resolved_symbol.name
completions += gen_completion_item(it.resolved_symbol)
insert_completion_item(completions, it.resolved_symbol, seen)
}
}
for it : ns.functions.iter() {
if not it.is_method {
seen += it.sym.name
completions += gen_completion_item(it.sym)
insert_completion_item(completions, it.sym, seen)
}
}

@@ -560,31 +570,31 @@ def gen_completion_items_from_ns(ns: &Namespace, completions: &Value) {
}
}

def gen_completions_from_symbol(sym: &Symbol, node: &AST, completions: &Value) {
def gen_completions_from_symbol(sym: &Symbol, node: &AST, completions: &Value, seen: &Set<str>) {
match sym.type {
Structure => {
if node? and node.type != NSLookup {
for field : sym.u.struc.fields.iter() {
completions += gen_completion_item(field.sym)
insert_completion_item(completions, field.sym, seen)
}
}
for mth : sym.u.struc.type.methods.iter_values() {
completions += gen_completion_item(mth.sym)
insert_completion_item(completions, mth.sym, seen)
}
}
TypeDef => {
for mth : sym.u.type_def.methods.iter_values() {
completions += gen_completion_item(mth.sym)
insert_completion_item(completions, mth.sym, seen)
}
}
Enum => {
if node? and node.type == NSLookup {
if (node? and node.type == NSLookup) {
for field : sym.u.enum_.fields.iter() {
completions += gen_completion_item(field.sym)
insert_completion_item(completions, field.sym, seen)
}
}
for mth : sym.u.enum_.type.methods.iter_values() {
completions += gen_completion_item(mth.sym)
insert_completion_item(completions, mth.sym, seen)
}
}
Variable => {
@@ -593,10 +603,10 @@ def gen_completions_from_symbol(sym: &Symbol, node: &AST, completions: &Value) {
typ = typ.u.ptr
}
if typ? and typ.sym? {
gen_completions_from_symbol(typ.sym, node, completions)
gen_completions_from_symbol(typ.sym, node, completions, seen)
}
}
Namespace => gen_completion_items_from_ns(sym.u.ns, completions)
Namespace => gen_completion_items_from_ns(completions, sym.u.ns, seen)
else => {
if verbose then println(f"gen_completions_json: unhandled symbol type: {sym.type}")
}
@@ -608,6 +618,7 @@ def gen_completions_json(finder: &Finder): &Value {
if not node? return null

let completions = Value::new(List)
let hint_type = node.etype

let sym = match node.type {
Member => node.u.member.lhs.resolved_symbol
@@ -616,12 +627,14 @@
else => null
}

let seen = Set<str>::new()
defer seen.free()
match sym? {
// If we have a symbol to complete, we can generate completions from it...
true => gen_completions_from_symbol(sym, node, completions)
true => gen_completions_from_symbol(sym, node, completions, seen)

// ...otherwise, we can generate completions from the current scope.
false => gen_completions_from_scope(finder.found_scope, completions)
false => gen_completions_from_scope(finder.found_scope, completions, hint_type, seen)
}

let obj = Value::new(Dictionary)
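The thrust of the utils.oc changes above is that every completion generator now threads a shared seen set through insert_completion_item, so a given display name is only suggested once, and scope completions can additionally be filtered by the expression's hint type (with enum variant names suggested when the expected type is an enum). Below is a minimal sketch of the dedup pattern, assuming the std Set and Vector types already used in this file; push_unique and demo are hypothetical names for illustration, not code from the repository.

    // Hypothetical sketch: emit each completion label at most once by
    // consulting a shared seen set before pushing it to the output.
    def push_unique(out: &Vector<str>, seen: &Set<str>, name: str) {
        if name in seen return   // this label was already suggested; skip it
        seen += name
        out.push(name)
    }

    def demo() {
        let out = Vector<str>::new()
        let seen = Set<str>::new()
        defer seen.free()

        push_unique(out, seen, "foo")
        push_unique(out, seen, "foo")   // dropped as a duplicate
        push_unique(out, seen, "bar")
        // out now holds only "foo" and "bar"
    }

In the commit itself the seen set is created once in gen_completions_json and passed down to every generator, which is why gen_completion_items_from_ns no longer allocates its own.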
46 changes: 29 additions & 17 deletions compiler/parser.oc
@@ -86,6 +86,11 @@ def Parser::token_is(&this, type: TokenType): bool {
return .token().type == type
}

def Parser::token_is_eof_or(&this, type: TokenType): bool {
if .token_is(TokenType::EOF) return true
return .token_is(type)
}

def Parser::token_is_identifier(&this, name: str): bool => .token().is_identifier(name)

def Parser::peek_token_is(&this, off: u32, type: TokenType): bool {
@@ -192,7 +197,7 @@ def Parser::parse_type(&this): &Type => match .token().type {
.consume(TokenType::OpenParen)
let params = Vector<&Variable>::new()
let is_variadic = false
while not .token_is(TokenType::CloseParen) {
while not .token_is_eof_or(TokenType::CloseParen) {
if .token_is(TokenType::Ellipsis) {
.consume(TokenType::Ellipsis)
is_variadic = true
@@ -294,7 +299,7 @@ def Parser::parse_scoped_identifier(&this, consume_template: bool = true): &AST
let start = .consume(TokenType::LessThan)
let args = Vector<&Type>::new()

while not .token_is(TokenType::GreaterThan) {
while not .token_is_eof_or(TokenType::GreaterThan) {
args.push(.parse_type())
if not .token_is(TokenType::GreaterThan) {
if not .consume_if(TokenType::Comma) {
@@ -454,7 +459,7 @@ def Parser::parse_match(&this): &AST {

let cases = Vector<&MatchCase>::new()
.consume(TokenType::OpenCurly)
while not .token_is(TokenType::CloseCurly) {
while not .token_is_eof_or(TokenType::CloseCurly) {
if .token_is(TokenType::Else) {
node.u.match_stmt.defolt_span = .token().span
.consume(TokenType::Else)
@@ -502,7 +507,7 @@ def Parser::parse_literal_suffix_type(&this, suffix: &Token): &Type {
def Parser::parse_call(&this, callee: &AST): &AST {
let start = .consume(TokenType::OpenParen)
let args = Vector<&Argument>::new()
while not .token_is(TokenType::CloseParen) {
while not .token_is_eof_or(TokenType::CloseParen) {
let label_tok: &Token = null
if .token_is(Identifier) and .peek_token_is(1, Colon) {
label_tok = .consume(TokenType::Identifier)
@@ -531,7 +536,7 @@ def Parser::parse_var_initializer(&this): &AST {
if .token_is(TokenType::OpenSquare) {
let start = .consume(TokenType::OpenSquare)
let elements = Vector<&AST>::new()
while not .token_is(TokenType::CloseSquare) {
while not .token_is_eof_or(TokenType::CloseSquare) {
elements.push(.parse_var_initializer())
if not .token_is(TokenType::CloseSquare) {
.consume(TokenType::Comma)
@@ -695,8 +700,15 @@ def Parser::parse_atom(&this, end_type: TokenType): &AST {
}
else => {
.unhandled_type("parse_expression")
node = AST::new(Error, .token().span)
.curr += 1
let prev_span = .tokens[.curr - 1].span
let cur_span = .token().span

let err_span = Span(prev_span.end, cur_span.start)
if not .token_is(end_type) {
.curr += 1
err_span = cur_span
}
node = AST::new(Error, err_span)
}
}
return node
@@ -1048,7 +1060,7 @@ def Parser::parse_multi_if(&this, start_tok: &Token): &AST {
let branches = Vector<IfBranch>::new()
node.u.if_stmt.branches = branches

while not .token_is(TokenType::CloseCurly) {
while not .token_is_eof_or(TokenType::CloseCurly) {
if .token_is(TokenType::Else) {
node.u.if_stmt.els_span = .token().span
.consume(TokenType::Else)
@@ -1341,13 +1353,13 @@ def Parser::parse_block(&this): &AST {
let start = .consume(TokenType::OpenCurly)

let statements = Vector<&AST>::new()
while not .token_is(TokenType::CloseCurly) {
while not .token_is_eof_or(TokenType::CloseCurly) {
let statement = .parse_statement()
if statement? statements.push(statement)
}

if not .token_is(TokenType::CloseCurly) {
.error(Error::new(.token().span, "Expected '}'"))
.error(Error::new(.token().span, "Expected '}' at end of block"))
return AST::new(Error, .token().span)
}
let end = .consume(TokenType::CloseCurly)
@@ -1360,7 +1372,7 @@ def Parser::parse_block(&this): &AST {
def Parser::parse_template_params(&this, sym: &Symbol, out_span: &Span = null) {
let start = .consume(TokenType::LessThan).span
let params = Vector<&Symbol>::new()
while not .token_is(TokenType::GreaterThan) {
while not .token_is_eof_or(TokenType::GreaterThan) {
let type = .consume(TokenType::Identifier)
let name = type.text
let sym = Symbol::new(TypeDef, .ns, name, name, name, type.span)
@@ -1434,7 +1446,7 @@ def Parser::parse_function(&this): &Function {

.consume(TokenType::OpenParen)
let seen_default = false
while not .token_is(TokenType::CloseParen) {
while not .token_is_eof_or(TokenType::CloseParen) {

// Ellipses are only allowed as the last parameter, so we break early if we see one
if .token_is(Ellipsis) {
@@ -1641,7 +1653,7 @@ def Parser::parse_import_path(&this, end_type: TokenType): &Vector<&ImportPart>
let open = .consume(TokenType::OpenCurly)

let sub_paths = Vector<&Vector<&ImportPart>>::new()
while not .token_is(TokenType::CloseCurly) {
while not .token_is_eof_or(TokenType::CloseCurly) {
let sub_path = .parse_import_path(CloseCurly)
if not sub_path? return null

@@ -1834,7 +1846,7 @@ def Parser::parse_struct(&this): &Structure {
// Extern structs don't need to have a body.
if not struc.sym.is_extern or .token_is(TokenType::OpenCurly) {
.consume(TokenType::OpenCurly)
while not .token_is(TokenType::CloseCurly) {
while not .token_is_eof_or(TokenType::CloseCurly) {
if not .parse_struct_field(struc) break
if not .token_is(TokenType::CloseCurly) {
.consume_newline_or(TokenType::Comma)
@@ -1865,7 +1877,7 @@ def Parser::parse_enum(&this): &Enum {
}

.consume(TokenType::OpenCurly)
while not .token_is(TokenType::CloseCurly) {
while not .token_is_eof_or(TokenType::CloseCurly) {
// Parse any attributes for the enum value
.parse_attributes_if_any()

@@ -1919,7 +1931,7 @@ def Parser::parse_attribute(&this) {
}
let attr = Attribute::new(attr_type, name.span)

while not .token_is(TokenType::CloseSquare) {
while not .token_is_eof_or(TokenType::CloseSquare) {
if not .token_is(StringLiteral) {
.error(Error::new(.token().span, "Only string literals supported in attribute arguments"))
.curr += 1
@@ -1949,7 +1961,7 @@ def Parser::parse_attributes_if_any(&this) {
def Parser::parse_namespace_until(&this, end_type: TokenType) {
.add_doc_comment(.ns.sym, .token())

while not .token_is(end_type) {
while not .token_is_eof_or(end_type) {

.parse_attributes_if_any()
match .token().type {
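The recurring parser.oc change is the new Parser::token_is_eof_or guard: every loop that previously spun on "while not .token_is(<closing token>)" now also stops at EOF, so a truncated or half-typed file (the common case while the language server is running) can no longer hang the parser, and parse_atom now places its Error node over the gap between the previous and current tokens instead of always consuming a token. A minimal sketch of the loop guard follows; skip_to_close is a hypothetical helper for illustration, not code from the repository.

    // Hypothetical sketch: scan forward to the closing token, but bail out at
    // EOF so a missing delimiter cannot keep the parser looping forever.
    def Parser::skip_to_close(&this, close: TokenType) {
        while not .token_is_eof_or(close) {
            .curr += 1
        }
        if .token_is(TokenType::EOF) {
            .error(Error::new(.token().span, "Unexpected end of file"))
        }
    }

Paired with the finder.oc change that matches Error nodes by span, this keeps completions usable at the exact point where the syntax is still incomplete.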