// Package internal contains common API functions and structures shared between lexer packages.
package internal

import (
	"path/filepath"
	"sort"
	"strings"

	"github.com/danwakefield/fnmatch"

	"github.com/alecthomas/chroma"
)

var (
	ignoredSuffixes = [...]string{
		// Editor backups
		"~", ".bak", ".old", ".orig",
		// Debian and derivatives apt/dpkg backups
		".dpkg-dist", ".dpkg-old",
		// Red Hat and derivatives rpm backups
		".rpmnew", ".rpmorig", ".rpmsave",
		// Build system input/template files
		".in",
	}
)

// Registry of Lexers.
var Registry = struct {
	Lexers  chroma.Lexers
	byName  map[string]chroma.Lexer
	byAlias map[string]chroma.Lexer
}{
	byName:  map[string]chroma.Lexer{},
	byAlias: map[string]chroma.Lexer{},
}

// Names of all lexers, optionally including aliases.
func Names(withAliases bool) []string {
	out := []string{}
	for _, lexer := range Registry.Lexers {
		config := lexer.Config()
		out = append(out, config.Name)
		if withAliases {
			out = append(out, config.Aliases...)
		}
	}
	sort.Strings(out)
	return out
}

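// Usage sketch (illustrative, not part of the original file): listing every
// registered lexer, optionally with its aliases. Outside this package these
// functions are normally reached through the github.com/alecthomas/chroma/lexers
// wrappers.
//
//	for _, name := range Names(true) {
//		fmt.Println(name)
//	}
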
// Get a Lexer by name, alias or file extension.
func Get(name string) chroma.Lexer {
	if lexer := Registry.byName[name]; lexer != nil {
		return lexer
	}
	if lexer := Registry.byAlias[name]; lexer != nil {
		return lexer
	}
	if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil {
		return lexer
	}
	if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil {
		return lexer
	}

	candidates := chroma.PrioritisedLexers{}
	// Try file extension.
	if lexer := Match("filename." + name); lexer != nil {
		candidates = append(candidates, lexer)
	}
	// Try exact filename.
	if lexer := Match(name); lexer != nil {
		candidates = append(candidates, lexer)
	}
	if len(candidates) == 0 {
		return nil
	}
	sort.Sort(candidates)
	return candidates[0]
}

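// Usage sketch (illustrative, not part of the original file): Get resolves a
// lexer by exact name, then by alias (both case-insensitively), and finally by
// treating the argument as a file extension or filename via Match. A nil
// result means nothing is registered for that name.
//
//	lexer := Get("Go")    // by registered name
//	lexer = Get("golang") // by alias
//	if lexer == nil {
//		lexer = Fallback // plain-text lexer declared at the end of this file
//	}
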
// MatchMimeType attempts to find a lexer for the given MIME type.
func MatchMimeType(mimeType string) chroma.Lexer {
	matched := chroma.PrioritisedLexers{}
	for _, l := range Registry.Lexers {
		for _, lmt := range l.Config().MimeTypes {
			if mimeType == lmt {
				matched = append(matched, l)
			}
		}
	}
	if len(matched) != 0 {
		sort.Sort(matched)
		return matched[0]
	}
	return nil
}

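// Usage sketch (illustrative, not part of the original file): resolving a
// lexer from a MIME type, e.g. one reported by an upstream content-detection
// step. The MIME type must match one of a lexer's configured MimeTypes exactly.
//
//	if lexer := MatchMimeType("text/x-python"); lexer != nil {
//		fmt.Println(lexer.Config().Name)
//	}
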
// Match returns the first lexer matching filename.
func Match(filename string) chroma.Lexer {
	filename = filepath.Base(filename)
	matched := chroma.PrioritisedLexers{}
	// First, try primary filename matches.
	for _, lexer := range Registry.Lexers {
		config := lexer.Config()
		for _, glob := range config.Filenames {
			if fnmatch.Match(glob, filename, 0) {
				matched = append(matched, lexer)
			} else {
				for _, suf := range &ignoredSuffixes {
					if fnmatch.Match(glob+suf, filename, 0) {
						matched = append(matched, lexer)
						break
					}
				}
			}
		}
	}
	if len(matched) > 0 {
		sort.Sort(matched)
		return matched[0]
	}
	matched = nil
	// Next, try filename aliases.
	for _, lexer := range Registry.Lexers {
		config := lexer.Config()
		for _, glob := range config.AliasFilenames {
			if fnmatch.Match(glob, filename, 0) {
				matched = append(matched, lexer)
			} else {
				for _, suf := range &ignoredSuffixes {
					if fnmatch.Match(glob+suf, filename, 0) {
						matched = append(matched, lexer)
						break
					}
				}
			}
		}
	}
	if len(matched) > 0 {
		sort.Sort(matched)
		return matched[0]
	}
	return nil
}

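// Usage sketch (illustrative, not part of the original file): Match only looks
// at the base name of the path, and the ignoredSuffixes list lets common
// backup/template extensions fall through to the underlying glob, so a name
// like "main.go.orig" still matches the "*.go" pattern.
//
//	if lexer := Match("/tmp/checkout/main.go.orig"); lexer != nil {
//		fmt.Println(lexer.Config().Name)
//	}
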
// Analyse text content and return the "best" lexer.
func Analyse(text string) chroma.Lexer {
	var picked chroma.Lexer
	highest := float32(0.0)
	for _, lexer := range Registry.Lexers {
		if analyser, ok := lexer.(chroma.Analyser); ok {
			weight := analyser.AnalyseText(text)
			if weight > highest {
				picked = lexer
				highest = weight
			}
		}
	}
	return picked
}

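// Usage sketch (illustrative, not part of the original file): content-based
// detection for when no usable filename or MIME type is available. Only lexers
// implementing chroma.Analyser take part; nil is returned when nothing scores
// above zero.
//
//	if lexer := Analyse("#!/usr/bin/env python\nprint('hi')\n"); lexer != nil {
//		fmt.Println(lexer.Config().Name)
//	}
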
// Register a Lexer with the global registry.
func Register(lexer chroma.Lexer) chroma.Lexer {
	config := lexer.Config()
	Registry.byName[config.Name] = lexer
	Registry.byName[strings.ToLower(config.Name)] = lexer
	for _, alias := range config.Aliases {
		Registry.byAlias[alias] = lexer
		Registry.byAlias[strings.ToLower(alias)] = lexer
	}
	Registry.Lexers = append(Registry.Lexers, lexer)
	return lexer
}

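// Usage sketch (illustrative, not part of the original file): lexer packages
// typically call Register from a package-level variable declaration, wrapping
// a lazily-built lexer. The config values and rules function below are
// placeholders, not a real chroma lexer.
//
//	var MyLang = internal.Register(chroma.MustNewLazyLexer(&chroma.Config{
//		Name:      "MyLang",
//		Aliases:   []string{"mylang"},
//		Filenames: []string{"*.mylang"},
//		MimeTypes: []string{"text/x-mylang"},
//	}, myLangRules))
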
// PlaintextRules is used for the fallback lexer as well as the explicit
// plaintext lexer.
func PlaintextRules() chroma.Rules {
	return chroma.Rules{
		"root": []chroma.Rule{
			{`.+`, chroma.Text, nil},
			{`\n`, chroma.Text, nil},
		},
	}
}

// Fallback lexer if no other is found.
var Fallback chroma.Lexer = chroma.MustNewLazyLexer(&chroma.Config{
	Name:      "fallback",
	Filenames: []string{"*"},
}, PlaintextRules)
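
// Usage sketch (illustrative, not part of the original file): the fallback
// lexer emits the whole input as plain chroma.Text tokens, so callers always
// get a usable token stream even for unrecognised content.
//
//	it, err := Fallback.Tokenise(nil, "no recognisable syntax here\n")
//	if err == nil {
//		for _, tok := range it.Tokens() {
//			fmt.Printf("%s %q\n", tok.Type, tok.Value)
//		}
//	}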