performance optimization test
All checks were successful
release-tag / release-image (push) Successful in 1m49s

2025-11-10 00:49:50 +01:00
parent 068f13b73f
commit 2a07586d94

main.go (101 changed lines)

@@ -728,7 +728,7 @@ func importCategory(st Store, cat, url string) (int, error) {
return count, sc.Err()
}
func startImporter(st Store, cats map[string]string) {
func startImporter(st Store, cats map[string]string, srv *server) {
go func() {
for {
start := time.Now()
@@ -752,6 +752,7 @@ func startImporter(st Store, cats map[string]string) {
}()
}
wg.Wait()
srv.rebuildIndex()
atomic.AddUint64(&metrics.importerCycles, 1)
atomic.StoreInt64(&metrics.importerLastSuccess, time.Now().Unix())
log.Printf("Import cycle finished in %s", time.Since(start))
@@ -764,10 +765,74 @@ func startImporter(st Store, cats map[string]string) {
// HTTP layer
// ──────────────────────────────────────────────────────────────────────────────
type ipIndex struct {
v4Lens []int // prefix lengths present in the index (e.g. 8, 16, 24, 32)
v6Lens []int
v4 map[int]map[string][]string // len -> "a.b.c.d/len" -> []categories
v6 map[int]map[string][]string // len -> "xxxx::/len" -> []categories
}
type server struct {
st Store
catalog map[string]string // latest catalog for category list
mu sync.RWMutex
idx atomic.Value
}
func (s *server) rebuildIndex() {
idx := &ipIndex{v4: map[int]map[string][]string{}, v6: map[int]map[string][]string{}}
lens4 := map[int]struct{}{}
lens6 := map[int]struct{}{}
cats := s.cats()
for _, c := range cats {
pfxs, _ := s.st.ListPrefixes(c)
for _, p := range pfxs {
m := idx.v6
lens := lens6
if p.Addr().Is4() {
m = idx.v4
lens = lens4
}
l := int(p.Bits())
if m[l] == nil {
m[l] = map[string][]string{}
}
k := p.Masked().String() // canonical "addr/len" key: host bits zeroed so lookups match
m[l][k] = append(m[l][k], c)
lens[l] = struct{}{}
}
}
// also fold in the manual blacklist (as category "manual-blacklist")
if bl, err := s.st.ListBlacklist(); err == nil {
for p := range bl {
m := idx.v6
lens := lens6
if p.Addr().Is4() {
m = idx.v4
lens = lens4
}
l := int(p.Bits())
if m[l] == nil {
m[l] = map[string][]string{}
}
k := p.Masked().String()
m[l][k] = append(m[l][k], "manual-blacklist")
lens[l] = struct{}{}
}
}
// store the lengths sorted so lookups only probe lengths that actually occur
for l := range lens4 {
idx.v4Lens = append(idx.v4Lens, l)
}
for l := range lens6 {
idx.v6Lens = append(idx.v6Lens, l)
}
sort.Ints(idx.v4Lens)
sort.Ints(idx.v6Lens)
s.idx.Store(idx)
}
func (s *server) cats() []string {
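rebuildIndex groups every prefix by address family and prefix length and keys it by its canonical "addr/len" string, so a later lookup only has to mask the query address to each length that actually occurs and do an exact map probe. A small self-contained sketch of that idea (addresses, lengths, and categories here are made up):

package main

import (
	"fmt"
	"net/netip"
)

// keyFor masks ip down to the given prefix length and renders the canonical
// "addr/len" form, e.g. 203.0.113.77 with /24 becomes "203.0.113.0/24".
func keyFor(ip netip.Addr, bits int) string {
	return netip.PrefixFrom(ip, bits).Masked().String()
}

func main() {
	// Same shape as ipIndex.v4: prefix length -> "addr/len" -> categories.
	v4 := map[int]map[string][]string{
		24: {"203.0.113.0/24": {"scanners"}},
		32: {"198.51.100.7/32": {"manual-blacklist"}},
	}
	v4Lens := []int{24, 32} // only the lengths that occur, sorted

	ip := netip.MustParseAddr("203.0.113.77")
	for _, l := range v4Lens {
		if cats, ok := v4[l][keyFor(ip, l)]; ok {
			fmt.Printf("/%d %v\n", l, cats) // "/24 [scanners]"
		}
	}
}

The cost of a lookup is therefore bounded by the number of distinct prefix lengths in the data, not by the number of stored prefixes.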
@@ -1310,32 +1375,30 @@ func (s *server) handleBlacklistList(w http.ResponseWriter, r *http.Request) {
writeJSON(w, map[string]any{"entries": list})
}
func (s *server) checkIP(ip netip.Addr, cats []string) ([]string, error) {
// 1) Whitelist wins
func (s *server) checkIP(ip netip.Addr, _ []string) ([]string, error) {
if ok, _ := s.st.IsWhitelisted(ip); ok {
return nil, nil
}
// 2) Manual blacklist
bl, _ := s.st.ListBlacklist()
for pfx := range bl {
if pfx.Contains(ip) {
// "manual-blacklist" als erste Kategorie markieren
return []string{"manual-blacklist"}, nil
}
idx := s.idx.Load().(*ipIndex)
var lens []int
var m map[int]map[string][]string
if ip.Is4() {
lens, m = idx.v4Lens, idx.v4
} else {
lens, m = idx.v6Lens, idx.v6
}
// 3) Imported categories
seen := map[string]struct{}{}
var matches []string
for _, l := range lens {
p := netip.PrefixFrom(ip, l).Masked().String() // Masked() zeroes the host bits; without it the key would not match the canonical prefix stored in the index
if cats, ok := m[l][p]; ok {
for _, c := range cats {
pfxs, err := s.st.ListPrefixes(c)
if err != nil {
return nil, err
}
for _, p := range pfxs {
if p.Contains(ip) {
if _, dup := seen[c]; !dup {
seen[c] = struct{}{}
matches = append(matches, c)
break
}
}
}
}
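The old checkIP re-read every category's prefix list from the Store and ran a linear Contains scan on each request; the rewritten version does one map probe per distinct prefix length against the prebuilt index. A rough, self-contained comparison of the two access patterns on synthetic data (not the project's Store; timings are only illustrative):

package main

import (
	"fmt"
	"net/netip"
	"time"
)

func main() {
	// Synthetic data set: 50,000 /24 prefixes inside 10.0.0.0/8, one category.
	var flat []netip.Prefix                    // what the old code effectively scanned per request
	idx := map[int]map[string][]string{24: {}} // the new per-length index
	for a := 0; a < 200; a++ {
		for b := 0; b < 250; b++ {
			p := netip.PrefixFrom(netip.AddrFrom4([4]byte{10, byte(a), byte(b), 0}), 24)
			flat = append(flat, p)
			idx[24][p.String()] = []string{"synthetic"}
		}
	}
	ip := netip.MustParseAddr("10.199.249.42") // matches the last prefix in the slice

	start := time.Now()
	for i := 0; i < 1000; i++ {
		for _, p := range flat {
			if p.Contains(ip) {
				break
			}
		}
	}
	fmt.Println("linear scan:   ", time.Since(start))

	start = time.Now()
	for i := 0; i < 1000; i++ {
		_ = idx[24][netip.PrefixFrom(ip, 24).Masked().String()]
	}
	fmt.Println("indexed lookup:", time.Since(start))
}

Whether a per-length map or a purpose-built radix/trie is the better fit depends on how many distinct prefix lengths the feeds actually contain; with only a handful of lengths, the map keeps the code simple while removing the per-request Store traffic.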
@@ -1428,7 +1491,7 @@ func main() {
}
}
srv := &server{st: st, catalog: cat}
startImporter(st, cat)
startImporter(st, cat, srv)
// honeypot listeners
if enableHoneypot {