[Improve] Highlighting for comments and defines #25

Merged 2 commits on Nov 22, 2024
2 changes: 1 addition & 1 deletion client/package.json
@@ -3,7 +3,7 @@
"description": "A language server for Verilog",
"author": "chrehall68",
"license": "MIT",
"version": "1.0.5",
"version": "1.0.6",
"repository": {
"type": "git",
"url": "https://github.com/chrehall68/vls"
44 changes: 24 additions & 20 deletions server/internal/lang/lexer.go
@@ -37,31 +37,31 @@ func (t Token) Line() int {
// Lexer is a lexer
type Lexer struct {
regexps []*regexp.Regexp
funcs []func(string) (Token, error)
funcs []func(string) ([]Token, error)
logger *zap.Logger
}

func NewLexer(logger *zap.Logger) *Lexer {
return &Lexer{
regexps: []*regexp.Regexp{},
funcs: []func(string) (Token, error){},
funcs: []func(string) ([]Token, error){},
logger: logger,
}
}

// AddMapping adds a mapping to the lexer
// the pattern should probably start with a ^ to indicate
// the start of the string
func (l *Lexer) AddMapping(pattern *regexp.Regexp, mapper func(string) (Token, error)) {
func (l *Lexer) AddMapping(pattern *regexp.Regexp, mapper func(string) ([]Token, error)) {
l.regexps = append(l.regexps, pattern)
l.funcs = append(l.funcs, mapper)
}

// helper to make adding a mapping easier when you don't need to capture
// the value
func (l *Lexer) AddMappingNoCapture(pattern *regexp.Regexp, Type string) {
l.AddMapping(pattern, func(code string) (Token, error) {
return Token{Type: Type, Value: code}, nil
l.AddMapping(pattern, func(code string) ([]Token, error) {
return []Token{{Type: Type, Value: code}}, nil
})
}

@@ -76,8 +76,8 @@ func (l *Lexer) Lex(code string) ([]Token, error) {
// the most characters, and match that token
// with the code
maxLength := 0
f := func(_ string) (Token, error) {
return Token{}, errors.New("no token found")
f := func(_ string) ([]Token, error) {
return []Token{}, errors.New("no token found")
}

// enforce order of precedence (mappings inserted first take precedence)
@@ -97,24 +97,28 @@ }
}

// now, match the token with the code
token, err := f(code[i : i+maxLength])
if err == nil { // don't add empty tokens
token.startCharacter = i - lineStart
token.endCharacter = i + maxLength - lineStart
token.line = line
tokens = append(tokens, token)
} else {
ts, err := f(code[i : i+maxLength])
if err != nil {
return nil, err
}

// update line info and i
for j := 0; j < maxLength; j++ {
if code[i+j] == '\n' {
line++
lineStart = i + j + 1
curPos := i
for _, t := range ts {
t.startCharacter = curPos - lineStart
t.endCharacter = curPos + len(t.Value) - lineStart
t.line = line
tokens = append(tokens, t)

// update line info and i
for j := 0; j < len(t.Value); j++ {
if code[curPos+j] == '\n' {
line++
lineStart = curPos + j + 1
}
}
curPos += len(t.Value)
}
i += maxLength

}

return tokens, nil
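A note on the new shape of the API: a mapper can now return several tokens for a single regexp match, and the rewritten loop assigns each returned token its own line and character range by walking its `Value`. The position math relies on the returned values concatenating back to exactly the matched text, since `curPos` advances by `len(t.Value)`. Below is a minimal sketch of a multi-token mapping against the updated signature; the pattern and token types are illustrative assumptions, and the import path assumes code living inside this module (`internal` packages are not importable from outside it).

```go
package main

import (
	"fmt"
	"regexp"
	"strings"

	"github.com/chrehall68/vls/server/internal/lang" // assumed import path
	"go.uber.org/zap"
)

func main() {
	lex := lang.NewLexer(zap.NewNop())
	// Hypothetical mapping that splits a "key=value" match into three
	// tokens. Their Values concatenate back to the matched text, which
	// is what the position-tracking loop in Lex relies on.
	lex.AddMapping(regexp.MustCompile(`^[a-z]+=[a-z]+`), func(code string) ([]lang.Token, error) {
		k, v, _ := strings.Cut(code, "=")
		return []lang.Token{
			{Type: "key", Value: k},
			{Type: "equals", Value: "="},
			{Type: "value", Value: v},
		}, nil
	})

	tokens, err := lex.Lex("width=eight")
	if err != nil {
		panic(err)
	}
	for _, t := range tokens {
		fmt.Printf("%-6s %q line=%d col=%d\n", t.Type, t.Value, t.Line(), t.StartCharacter())
	}
}
```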
30 changes: 22 additions & 8 deletions server/internal/lang/vlexer.go
@@ -22,7 +22,21 @@ func NewVLexer(logger *zap.Logger) *VLexer {
vlexer.AddMappingNoCapture(regexp.MustCompile(`^[\r\n]+`), "newline")
// comments
vlexer.AddMappingNoCapture(regexp.MustCompile(`^\/\/.*`), "comment")
vlexer.AddMappingNoCapture(regexp.MustCompile(`(?s)^\/\*.*\*\/`), "comment")
vlexer.AddMapping(regexp.MustCompile(`^\/\*(.*?\n?)*?\*\/`), func(code string) ([]Token, error) {
re := regexp.MustCompile(`(?P<COMMENT>.*\n?)`)
matches := re.FindAllStringSubmatch(code[2:len(code)-2], -1) // remove /* and */
tokens := []Token{}
tokens = append(tokens, Token{Type: "comment", Value: `/*`}) // add the first token
// add all the comments
for _, match := range matches {
if len(match[1]) > 0 {
tokens = append(tokens, Token{Type: "comment", Value: match[re.SubexpIndex("COMMENT")]})
}
}
tokens = append(tokens, Token{Type: "comment", Value: `*/`}) // add the last token

return tokens, nil
})
// keywords
vlexer.AddMappingNoCapture(regexp.MustCompile(`^module`), "module")
vlexer.AddMappingNoCapture(regexp.MustCompile(`^endmodule`), "endmodule")
@@ -73,22 +87,22 @@ func NewVLexer(logger *zap.Logger) *VLexer {
vlexer.AddMappingNoCapture(regexp.MustCompile(`^((reg)|(wire)|(genvar)|(parameter)|(integer))`), "type")
vlexer.AddMappingNoCapture(regexp.MustCompile(`^((input)|(output)|(inout))`), "direction")
vlexer.AddMappingNoCapture(regexp.MustCompile(`^defparam`), "defparam")
vlexer.AddMapping(regexp.MustCompile("^`?[A-Za-z][a-zA-Z0-9_]*"), func(code string) (Token, error) {
re := regexp.MustCompile("^`?(?P<IDENTIFIER>[A-Za-z][a-zA-Z0-9_]*)")
vlexer.AddMapping(regexp.MustCompile("^`?[A-Za-z][a-zA-Z0-9_]*"), func(code string) ([]Token, error) {
re := regexp.MustCompile("^(?P<IDENTIFIER>`?[A-Za-z][a-zA-Z0-9_]*)")
matches := re.FindStringSubmatch(code)
if len(matches) == 0 {
vlexer.logger.Sugar().Error("failed to parse identifier on ", code)
return Token{}, errors.New("failed to parse identifier")
return []Token{}, errors.New("failed to parse identifier")
}
return Token{Type: "identifier", Value: matches[re.SubexpIndex("IDENTIFIER")]}, nil
return []Token{{Type: "identifier", Value: matches[re.SubexpIndex("IDENTIFIER")]}}, nil
})
vlexer.AddMapping(regexp.MustCompile(`^(([0-9]*\'[hbd][0-9xzXZA-Fa-f]+)|([0-9]+)|(\"[^\n\"]*\"))`), func(code string) (Token, error) {
vlexer.AddMapping(regexp.MustCompile(`^(([0-9]*\'[hbd][0-9xzXZA-Fa-f]+)|([0-9]+)|(\"[^\n\"]*\"))`), func(code string) ([]Token, error) {
re := regexp.MustCompile(`^(?P<LITERAL>(([0-9]*\'[hbd][0-9xzXZA-Fa-f]+)|([0-9]+)|(\"[^\n\"]*\")))`)
matches := re.FindStringSubmatch(code)
if len(matches) == 0 {
return Token{}, errors.New("failed to parse literal" + code)
return []Token{}, errors.New("failed to parse literal" + code)
}
return Token{Type: "literal", Value: matches[re.SubexpIndex("LITERAL")]}, nil
return []Token{{Type: "literal", Value: matches[re.SubexpIndex("LITERAL")]}}, nil
})

return vlexer
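The practical effect of the new block-comment mapping: a /* ... */ spanning several lines now lexes into one comment token per line, plus the /* and */ delimiters, so every piece carries its own line number and the line-relative semantic-token encoding can highlight each line. A sketch of what that looks like, assuming the same import path as above; the expected output is inferred from the mapper, not captured from a run.

```go
package main

import (
	"fmt"

	"github.com/chrehall68/vls/server/internal/lang" // assumed import path
	"go.uber.org/zap"
)

func main() {
	vlexer := lang.NewVLexer(zap.NewNop())
	tokens, err := vlexer.Lex("/* first line\n   second line */")
	if err != nil {
		panic(err)
	}
	for _, t := range tokens {
		fmt.Printf("line %d: %-7s %q\n", t.Line(), t.Type, t.Value)
	}
	// Expected: four "comment" tokens ("/*", " first line\n",
	// "   second line ", "*/") rather than one multi-line token.
}
```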
5 changes: 1 addition & 4 deletions server/internal/vlsp/definition.go
@@ -28,9 +28,8 @@ func (h Handler) getLocationDetails(fname string, line int, character int) (*Loc
// keep track of which module we're inside
if strings.Contains(lineString, "module") {
tokens, err := lexer.Lex(lineString)
h.state.log.Sugar().Info("lineTOkens: ", tokens)
h.state.log.Sugar().Info("lineTokens: ", tokens)
if err == nil {
h.state.log.Sugar().Info("Ok so far: ")
for i := range tokens {
if tokens[i].Type == "module" {
// new module?
@@ -69,15 +68,13 @@ func (h Handler) jumpTo(fname string, line int, character int) ([]protocol.Locat
result := []protocol.Location{}
if details.token.Type == "identifier" {
// see if it's a module or definition
h.state.log.Sugar().Info("now looking for", details.token.Value)
location, ok := h.state.symbolMap[details.token.Value]
if ok {
result = append(result, location)
} else {
// otherwise, maybe it's a variable
moduleMap, ok := h.state.variableDefinitions[details.currentModule]
if ok {
h.state.log.Sugar().Info("moduleMap: ", moduleMap)
// look for variable definition
location, ok := moduleMap[details.token.Value]
if ok {
21 changes: 18 additions & 3 deletions server/internal/vlsp/semtokens.go
@@ -31,11 +31,12 @@ func GetSemanticTokensOptions() SemanticTokensOptions {
protocol.SemanticTokenType, // 0
protocol.SemanticTokenComment, // 1
protocol.SemanticTokenNumber, // 2
protocol.SemanticTokenMacro, // 3
protocol.SemanticTokenKeyword, // 3
protocol.SemanticTokenVariable, // 4
protocol.SemanticTokenClass, // 5
protocol.SemanticTokenParameter, // 6
protocol.SemanticTokenFunction, // 7
protocol.SemanticTokenMacro, // 8
},
TokenModifiers: []protocol.SemanticTokenModifiers{},
},
@@ -44,7 +45,7 @@ }
}
}

func Encode(tokens []lang.Token) []uint32 {
func (h Handler) Encode(tokens []lang.Token) []uint32 {
result := []uint32{}
prevLine := 0
prevCharacter := 0
@@ -87,6 +88,14 @@ func Encode(tokens []lang.Token) []uint32 {
"signed": 7,
"dollar": 7,
"pound": 7,
// 8 is reserved for defined identifiers
}
// flattened defines
flattenedDefines := map[string]bool{}
for _, defines := range h.state.defines {
for _, def := range defines {
flattenedDefines["`"+def.Identifier.Value] = true
}
}

addToken := func(token lang.Token) {
@@ -98,6 +107,12 @@
prevCharacter = 0
}

// special case for defined identifiers
_, ok := flattenedDefines[token.Value]
if token.Type == "identifier" && ok {
val = 8
}

// add into result
result = append(result, uint32(token.Line()-prevLine), uint32(token.StartCharacter()-prevCharacter), uint32(len(token.Value)), val, 0)

@@ -177,7 +192,7 @@ func (h Handler) SemanticTokensFull(ctx context.Context, params *protocol.Semant

// encode
result := &protocol.SemanticTokens{
Data: Encode(tokens),
Data: h.Encode(tokens),
}
h.state.log.Sugar().Info("SemanticTokensFull result: ", result)

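For context on the `Data` buffer: the LSP semantic-tokens format packs five `uint32` values per token: the line delta from the previous token, the start-character delta (absolute again whenever the line changes), the token length, the token-type index into the legend above, and a modifier bitset. With this change, identifiers that match a flattened define are reported with type index 8 (`SemanticTokenMacro`). A hand-worked encoding with invented positions:

```go
package main

import "fmt"

func main() {
	// Three tokens on a toy buffer, using the legend indices above
	// (3 = keyword, 8 = macro); the positions are invented.
	//
	//   line 0, col 0: "module"    len 6 -> 0, 0, 6, 3, 0
	//   line 0, col 7: "`WIDTH"    len 6 -> 0, 7, 6, 8, 0  (same line: col is a delta)
	//   line 2, col 0: "endmodule" len 9 -> 2, 0, 9, 3, 0  (new line: col is absolute)
	data := []uint32{
		0, 0, 6, 3, 0,
		0, 7, 6, 8, 0,
		2, 0, 9, 3, 0,
	}
	fmt.Println(data)
}
```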
3 changes: 2 additions & 1 deletion server/internal/vlsp/symbols.go
@@ -109,7 +109,8 @@ func (h Handler) GetSymbolsForFile(fname string, firstTime bool) {
}
}
for _, define := range h.state.defines[fname] {
h.state.symbolMap[define.Identifier.Value] = protocol.Location{
// explicitly add the backticks for defines
h.state.symbolMap["`"+define.Identifier.Value] = protocol.Location{
URI: protocol.DocumentURI(PathToURI(fname)),
Range: protocol.Range{
Start: protocol.Position{Line: uint32(define.Identifier.Line()), Character: uint32(define.Identifier.StartCharacter())},
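The backtick matters because Verilog macro uses are written with a leading backtick (`WIDTH) while the `define line declares the bare name; keying the symbol map by the backticked form lets the identifier token lexed at a use site, which now keeps its backtick per the vlexer change above, be looked up verbatim. A minimal self-contained sketch of the convention; the define name and location string are made up:

```go
package main

import "fmt"

func main() {
	// Sketch of the keying convention, assuming a file that contains:
	//   `define WIDTH 8
	symbolMap := map[string]string{}

	defineName := "WIDTH"                 // parsed from the `define, no backtick
	symbolMap["`"+defineName] = "top.v:1" // stored under the backticked form

	// A use site such as `WIDTH lexes as the identifier "`WIDTH",
	// so go-to-definition can look it up directly.
	if loc, ok := symbolMap["`WIDTH"]; ok {
		fmt.Println("definition at", loc)
	}
}
```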