// Package lexers contains the global registry of lexers, built from the XML
// definitions embedded below.
package lexers

import (
	"embed"
	"io/fs"

	"github.com/alecthomas/chroma/v2"
)

//go:embed embedded
var embedded embed.FS

// GlobalLexerRegistry is the global LexerRegistry of Lexers.
var GlobalLexerRegistry = func() *chroma.LexerRegistry {
	reg := chroma.NewLexerRegistry()
	// Load every embedded XML lexer definition into the registry.
	paths, err := fs.Glob(embedded, "embedded/*.xml")
	if err != nil {
		panic(err)
	}
	for _, path := range paths {
		reg.Register(chroma.MustNewXMLLexer(embedded, path))
	}
	return reg
}()
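
// The closure above runs once at package load, so a malformed embedded XML
// definition panics at startup rather than at first use. A sketch of loading
// a single definition the same way (the path is hypothetical):
//
//	lexer := chroma.MustNewXMLLexer(embedded, "embedded/go.xml")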

// Names of all lexers, optionally including aliases.
func Names(withAliases bool) []string {
	return GlobalLexerRegistry.Names(withAliases)
}
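
// A minimal usage sketch (hypothetical caller code, assuming this package is
// imported as "lexers"):
//
//	for _, name := range lexers.Names(true) {
//		fmt.Println(name)
//	}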

// Get a Lexer by name, alias or file extension.
//
// Note that if there isn't an exact match on name or alias, this will call
// Match(), so it is not efficient.
func Get(name string) chroma.Lexer {
	return GlobalLexerRegistry.Get(name)
}
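
// Sketch (hypothetical caller code; Get returns nil when nothing matches):
//
//	lexer := lexers.Get("go")
//	if lexer == nil {
//		lexer = lexers.Fallback
//	}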

// MatchMimeType attempts to find a lexer for the given MIME type.
func MatchMimeType(mimeType string) chroma.Lexer {
	return GlobalLexerRegistry.MatchMimeType(mimeType)
}
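
// Sketch (hypothetical caller code; the MIME type shown is an assumption
// about what the embedded definitions register):
//
//	if lexer := lexers.MatchMimeType("text/x-python"); lexer != nil {
//		// use lexer
//	}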

// Match returns the first lexer matching filename.
//
// Note that this iterates over all file patterns in all lexers, so it's not
// particularly efficient.
func Match(filename string) chroma.Lexer {
	return GlobalLexerRegistry.Match(filename)
}
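
// Sketch (hypothetical caller code; matching is against filename patterns
// such as "*.go"):
//
//	lexer := lexers.Match("main.go")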

// Register a Lexer with the global registry.
func Register(lexer chroma.Lexer) chroma.Lexer {
	return GlobalLexerRegistry.Register(lexer)
}
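
// Sketch (hypothetical custom lexer registration; "mylang" and its filename
// pattern are invented for illustration):
//
//	lexers.Register(chroma.MustNewLexer(&chroma.Config{
//		Name:      "mylang",
//		Filenames: []string{"*.mylang"},
//	}, lexers.PlaintextRules))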

// Analyse text content and return the "best" lexer.
func Analyse(text string) chroma.Lexer {
	return GlobalLexerRegistry.Analyse(text)
}
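
// Sketch (hypothetical caller code; Analyse may return nil when no lexer
// claims the content):
//
//	lexer := lexers.Analyse("#!/usr/bin/env python\nprint('hi')")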

// PlaintextRules is used for the fallback lexer as well as the explicit
// plaintext lexer.
func PlaintextRules() chroma.Rules {
	return chroma.Rules{
		"root": []chroma.Rule{
			{`.+`, chroma.Text, nil},
			{`\n`, chroma.Text, nil},
		},
	}
}
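
// Sketch (hypothetical caller code): a lexer built from these rules emits
// every run of characters, and each newline, as a plain chroma.Text token:
//
//	lexer := chroma.MustNewLexer(&chroma.Config{Name: "plain"}, lexers.PlaintextRules)
//	it, _ := lexer.Tokenise(nil, "hello\n")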

// Fallback lexer if no other is found.
var Fallback chroma.Lexer = chroma.MustNewLexer(&chroma.Config{
Name: "fallback",
Filenames: []string{"*"},
Priority: -1,
}, PlaintextRules)
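
// End-to-end sketch (hypothetical caller code): pick a lexer by filename,
// fall back to the plaintext lexer, then tokenise:
//
//	lexer := lexers.Match(filename)
//	if lexer == nil {
//		lexer = lexers.Fallback
//	}
//	it, err := lexer.Tokenise(nil, source)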