diff --git a/client/package.json b/client/package.json index ac08757..0a49af7 100644 --- a/client/package.json +++ b/client/package.json @@ -3,7 +3,7 @@ "description": "A language server for Verilog", "author": "chrehall68", "license": "MIT", - "version": "1.0.5", + "version": "1.0.6", "repository": { "type": "git", "url": "https://github.com/chrehall68/vls" diff --git a/server/internal/lang/lexer.go b/server/internal/lang/lexer.go index 98b6f0c..288274c 100644 --- a/server/internal/lang/lexer.go +++ b/server/internal/lang/lexer.go @@ -37,14 +37,14 @@ func (t Token) Line() int { // Lexer is a lexer type Lexer struct { regexps []*regexp.Regexp - funcs []func(string) (Token, error) + funcs []func(string) ([]Token, error) logger *zap.Logger } func NewLexer(logger *zap.Logger) *Lexer { return &Lexer{ regexps: []*regexp.Regexp{}, - funcs: []func(string) (Token, error){}, + funcs: []func(string) ([]Token, error){}, logger: logger, } } @@ -52,7 +52,7 @@ func NewLexer(logger *zap.Logger) *Lexer { // AddMapping adds a mapping to the lexer // the pattern should probably start with a ^ to indicate // the start of the string -func (l *Lexer) AddMapping(pattern *regexp.Regexp, mapper func(string) (Token, error)) { +func (l *Lexer) AddMapping(pattern *regexp.Regexp, mapper func(string) ([]Token, error)) { l.regexps = append(l.regexps, pattern) l.funcs = append(l.funcs, mapper) } @@ -60,8 +60,8 @@ func (l *Lexer) AddMapping(pattern *regexp.Regexp, mapper func(string) (Token, e // helper to make adding a mapping easier when you don't need to capture // the value func (l *Lexer) AddMappingNoCapture(pattern *regexp.Regexp, Type string) { - l.AddMapping(pattern, func(code string) (Token, error) { - return Token{Type: Type, Value: code}, nil + l.AddMapping(pattern, func(code string) ([]Token, error) { + return []Token{{Type: Type, Value: code}}, nil }) } @@ -76,8 +76,8 @@ func (l *Lexer) Lex(code string) ([]Token, error) { // the most characters, and match that token // with the code 
maxLength := 0 - f := func(_ string) (Token, error) { - return Token{}, errors.New("no token found") + f := func(_ string) ([]Token, error) { + return []Token{}, errors.New("no token found") } // enforce order of precedence (mappings inserted first take precedence) @@ -97,24 +97,28 @@ func (l *Lexer) Lex(code string) ([]Token, error) { } // now, match the token with the code - token, err := f(code[i : i+maxLength]) - if err == nil { // don't add empty tokens - token.startCharacter = i - lineStart - token.endCharacter = i + maxLength - lineStart - token.line = line - tokens = append(tokens, token) - } else { + ts, err := f(code[i : i+maxLength]) + if err != nil { return nil, err } - - // update line info and i - for j := 0; j < maxLength; j++ { - if code[i+j] == '\n' { - line++ - lineStart = i + j + 1 + curPos := i + for _, t := range ts { + t.startCharacter = curPos - lineStart + t.endCharacter = curPos + len(t.Value) - lineStart + t.line = line + tokens = append(tokens, t) + + // update line info and i + for j := 0; j < len(t.Value); j++ { + if code[curPos+j] == '\n' { + line++ + lineStart = curPos + j + 1 + } } + curPos += len(t.Value) } i += maxLength + } return tokens, nil diff --git a/server/internal/lang/vlexer.go b/server/internal/lang/vlexer.go index 892c7d5..1b6b493 100644 --- a/server/internal/lang/vlexer.go +++ b/server/internal/lang/vlexer.go @@ -22,7 +22,21 @@ func NewVLexer(logger *zap.Logger) *VLexer { vlexer.AddMappingNoCapture(regexp.MustCompile(`^[\r\n]+`), "newline") // comments vlexer.AddMappingNoCapture(regexp.MustCompile(`^\/\/.*`), "comment") - vlexer.AddMappingNoCapture(regexp.MustCompile(`(?s)^\/\*.*\*\/`), "comment") + vlexer.AddMapping(regexp.MustCompile(`^\/\*(.*?\n?)*?\*\/`), func(code string) ([]Token, error) { + re := regexp.MustCompile(`(?P<COMMENT>.*\n?)`) + matches := re.FindAllStringSubmatch(code[2:len(code)-2], -1) // remove /* and */ + tokens := []Token{} + tokens = append(tokens, Token{Type: "comment", Value: `/*`}) // add the first 
token + // add all the comments + for _, match := range matches { + if len(match[1]) > 0 { + tokens = append(tokens, Token{Type: "comment", Value: match[re.SubexpIndex("COMMENT")]}) + } + } + tokens = append(tokens, Token{Type: "comment", Value: `*/`}) // add the last token + + return tokens, nil + }) // keywords vlexer.AddMappingNoCapture(regexp.MustCompile(`^module`), "module") vlexer.AddMappingNoCapture(regexp.MustCompile(`^endmodule`), "endmodule") @@ -73,22 +87,22 @@ func NewVLexer(logger *zap.Logger) *VLexer { vlexer.AddMappingNoCapture(regexp.MustCompile(`^((reg)|(wire)|(genvar)|(parameter)|(integer))`), "type") vlexer.AddMappingNoCapture(regexp.MustCompile(`^((input)|(output)|(inout))`), "direction") vlexer.AddMappingNoCapture(regexp.MustCompile(`^defparam`), "defparam") - vlexer.AddMapping(regexp.MustCompile("^`?[A-Za-z][a-zA-Z0-9_]*"), func(code string) (Token, error) { - re := regexp.MustCompile("^`?(?P<IDENTIFIER>[A-Za-z][a-zA-Z0-9_]*)") + vlexer.AddMapping(regexp.MustCompile("^`?[A-Za-z][a-zA-Z0-9_]*"), func(code string) ([]Token, error) { + re := regexp.MustCompile("^(?P<IDENTIFIER>`?[A-Za-z][a-zA-Z0-9_]*)") matches := re.FindStringSubmatch(code) if len(matches) == 0 { vlexer.logger.Sugar().Error("failed to parse identifier on ", code) - return Token{}, errors.New("failed to parse identifier") + return []Token{}, errors.New("failed to parse identifier") } - return Token{Type: "identifier", Value: matches[re.SubexpIndex("IDENTIFIER")]}, nil + return []Token{{Type: "identifier", Value: matches[re.SubexpIndex("IDENTIFIER")]}}, nil }) - vlexer.AddMapping(regexp.MustCompile(`^(([0-9]*\'[hbd][0-9xzXZA-Fa-f]+)|([0-9]+)|(\"[^\n\"]*\"))`), func(code string) (Token, error) { + vlexer.AddMapping(regexp.MustCompile(`^(([0-9]*\'[hbd][0-9xzXZA-Fa-f]+)|([0-9]+)|(\"[^\n\"]*\"))`), func(code string) ([]Token, error) { re := regexp.MustCompile(`^(?P<LITERAL>(([0-9]*\'[hbd][0-9xzXZA-Fa-f]+)|([0-9]+)|(\"[^\n\"]*\")))`) matches := re.FindStringSubmatch(code) if len(matches) == 0 { - return Token{}, 
errors.New("failed to parse literal" + code) + return []Token{}, errors.New("failed to parse literal" + code) } - return Token{Type: "literal", Value: matches[re.SubexpIndex("LITERAL")]}, nil + return []Token{{Type: "literal", Value: matches[re.SubexpIndex("LITERAL")]}}, nil }) return vlexer diff --git a/server/internal/vlsp/definition.go b/server/internal/vlsp/definition.go index 4e0fab5..a3e5052 100644 --- a/server/internal/vlsp/definition.go +++ b/server/internal/vlsp/definition.go @@ -28,9 +28,8 @@ func (h Handler) getLocationDetails(fname string, line int, character int) (*Loc // keep track of which module we're inside if strings.Contains(lineString, "module") { tokens, err := lexer.Lex(lineString) - h.state.log.Sugar().Info("lineTOkens: ", tokens) + h.state.log.Sugar().Info("lineTokens: ", tokens) if err == nil { - h.state.log.Sugar().Info("Ok so far: ") for i := range tokens { if tokens[i].Type == "module" { // new module? @@ -69,7 +68,6 @@ func (h Handler) jumpTo(fname string, line int, character int) ([]protocol.Locat result := []protocol.Location{} if details.token.Type == "identifier" { // see if it's a module or definition - h.state.log.Sugar().Info("now looking for", details.token.Value) location, ok := h.state.symbolMap[details.token.Value] if ok { result = append(result, location) @@ -77,7 +75,6 @@ func (h Handler) jumpTo(fname string, line int, character int) ([]protocol.Locat // otherwise, maybe it's a variable moduleMap, ok := h.state.variableDefinitions[details.currentModule] if ok { - h.state.log.Sugar().Info("moduleMap: ", moduleMap) // look for variable definition location, ok := moduleMap[details.token.Value] if ok { diff --git a/server/internal/vlsp/semtokens.go b/server/internal/vlsp/semtokens.go index 31570c1..f23d3d5 100644 --- a/server/internal/vlsp/semtokens.go +++ b/server/internal/vlsp/semtokens.go @@ -31,11 +31,12 @@ func GetSemanticTokensOptions() SemanticTokensOptions { protocol.SemanticTokenType, // 0 
protocol.SemanticTokenComment, // 1 protocol.SemanticTokenNumber, // 2 - protocol.SemanticTokenMacro, // 3 + protocol.SemanticTokenKeyword, // 3 protocol.SemanticTokenVariable, // 4 protocol.SemanticTokenClass, // 5 protocol.SemanticTokenParameter, // 6 protocol.SemanticTokenFunction, // 7 + protocol.SemanticTokenMacro, // 8 }, TokenModifiers: []protocol.SemanticTokenModifiers{}, }, @@ -44,7 +45,7 @@ func GetSemanticTokensOptions() SemanticTokensOptions { } } -func Encode(tokens []lang.Token) []uint32 { +func (h Handler) Encode(tokens []lang.Token) []uint32 { result := []uint32{} prevLine := 0 prevCharacter := 0 @@ -87,6 +88,14 @@ func Encode(tokens []lang.Token) []uint32 { "signed": 7, "dollar": 7, "pound": 7, + // 8 is reserved for defined identifiers + } + // flattened defines + flattenedDefines := map[string]bool{} + for _, defines := range h.state.defines { + for _, def := range defines { + flattenedDefines["`"+def.Identifier.Value] = true + } } addToken := func(token lang.Token) { @@ -98,6 +107,12 @@ func Encode(tokens []lang.Token) []uint32 { prevCharacter = 0 } + // special case for defined identifiers + _, ok := flattenedDefines[token.Value] + if token.Type == "identifier" && ok { + val = 8 + } + // add into result result = append(result, uint32(token.Line()-prevLine), uint32(token.StartCharacter()-prevCharacter), uint32(len(token.Value)), val, 0) @@ -177,7 +192,7 @@ func (h Handler) SemanticTokensFull(ctx context.Context, params *protocol.Semant // encode result := &protocol.SemanticTokens{ - Data: Encode(tokens), + Data: h.Encode(tokens), } h.state.log.Sugar().Info("SemanticTokensFull result: ", result) diff --git a/server/internal/vlsp/symbols.go b/server/internal/vlsp/symbols.go index 5215a26..9c0286c 100644 --- a/server/internal/vlsp/symbols.go +++ b/server/internal/vlsp/symbols.go @@ -109,7 +109,8 @@ func (h Handler) GetSymbolsForFile(fname string, firstTime bool) { } } for _, define := range h.state.defines[fname] { - 
h.state.symbolMap[define.Identifier.Value] = protocol.Location{ + // explicitly add the backticks for defines + h.state.symbolMap["`"+define.Identifier.Value] = protocol.Location{ URI: protocol.DocumentURI(PathToURI(fname)), Range: protocol.Range{ Start: protocol.Position{Line: uint32(define.Identifier.Line()), Character: uint32(define.Identifier.StartCharacter())},