package chroma

import (
	"testing"

	assert "github.com/alecthomas/assert/v2"
)
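
// TestTokenTypeClassifiers exercises the TokenType classification helpers:
// InCategory and InSubCategory test membership in a (sub)category, and
// String returns the name of the token type constant.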
func TestTokenTypeClassifiers(t *testing.T) {
	assert.True(t, GenericDeleted.InCategory(Generic))
	assert.True(t, LiteralStringBacktick.InSubCategory(String))
	assert.Equal(t, LiteralStringBacktick.String(), "LiteralStringBacktick")
}
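
// TestSimpleLexer defines a minimal INI lexer with a single "root" state and
// verifies that a small INI document tokenises to the expected token stream.
// mustNewLexer is a test helper (defined elsewhere in this package) that
// builds the lexer and is expected to fail the test on error, hence the
// *testing.T argument.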
func TestSimpleLexer(t *testing.T) {
	lexer := mustNewLexer(t, &Config{
		Name:      "INI",
		Aliases:   []string{"ini", "cfg"},
		Filenames: []string{"*.ini", "*.cfg"},
	}, map[string][]Rule{
		"root": {
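			// Runs of whitespace, including newlines.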
			{`\s+`, Whitespace, nil},
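			// Line comments introduced by ";".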
			{`;.*?$`, Comment, nil},
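			// Section headers such as "[section]".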
			{`\[.*?\]$`, Keyword, nil},
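			// "key = value" lines; ByGroups assigns one token type per capture group.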
			{`(.*?)(\s*)(=)(\s*)(.*?)$`, ByGroups(Name, Whitespace, Operator, Whitespace, String), nil},
		},
	})
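	// Tokenise the sample input; passing nil options uses the defaults.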
	actual, err := Tokenise(lexer, nil, `
	; this is a comment
	[section]
	a = 10
`)
	assert.NoError(t, err)
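	// Note: String in the key/value rule above is an alias for LiteralString,
	// so the value token appears here as LiteralString.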
	expected := []Token{
		{Whitespace, "\n\t"},
		{Comment, "; this is a comment"},
		{Whitespace, "\n\t"},
		{Keyword, "[section]"},
		{Whitespace, "\n\t"},
		{Name, "a"},
		{Whitespace, " "},
		{Operator, "="},
		{Whitespace, " "},
		{LiteralString, "10"},
		{Whitespace, "\n"},
	}
	assert.Equal(t, expected, actual)
}