feat: backend core - auth, user, role, permission, device, webhook, monitoring, cache, repository, service, middleware, API handlers
This commit is contained in:
232
internal/util/logredact/redact.go
Normal file
232
internal/util/logredact/redact.go
Normal file
@@ -0,0 +1,232 @@
|
||||
package logredact
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// maxRedactDepth caps recursion depth in redactValueWithDepth to prevent
// stack overflow on deeply nested values.
const maxRedactDepth = 32
|
||||
|
||||
// defaultSensitiveKeys is the set of key names whose values are always
// redacted. NOTE: keep in sync with defaultSensitiveKeyList below — the two
// are maintained in parallel by hand.
var defaultSensitiveKeys = map[string]struct{}{
	"authorization_code": {},
	"code":               {},
	"code_verifier":      {},
	"access_token":       {},
	"refresh_token":      {},
	"id_token":           {},
	"client_secret":      {},
	"password":           {},
}
|
||||
|
||||
// defaultSensitiveKeyList holds the same keys as defaultSensitiveKeys in a
// fixed order, so regexp alternations built from it are deterministic.
// NOTE: keep in sync with defaultSensitiveKeys above.
var defaultSensitiveKeyList = []string{
	"authorization_code",
	"code",
	"code_verifier",
	"access_token",
	"refresh_token",
	"id_token",
	"client_secret",
	"password",
}
|
||||
|
||||
// textRedactPatterns bundles the three compiled regexps RedactText applies
// for one particular set of sensitive keys.
type textRedactPatterns struct {
	reJSONLike  *regexp.Regexp // "key":"value" fragments in JSON-like text
	reQueryLike *regexp.Regexp // key=value fragments (query strings, form bodies)
	rePlain     *regexp.Regexp // key: value / key = value fragments in plain text
}
|
||||
|
||||
var (
	// reGOCSPX matches Google OAuth client secrets ("GOCSPX-" prefix).
	reGOCSPX = regexp.MustCompile(`GOCSPX-[0-9A-Za-z_-]{24,}`)
	// reAIza matches Google API keys ("AIza" followed by 35 chars).
	reAIza = regexp.MustCompile(`AIza[0-9A-Za-z_-]{35}`)

	// defaultTextRedactPatterns covers only the built-in sensitive keys;
	// compiled once at package init.
	defaultTextRedactPatterns = compileTextRedactPatterns(nil)
	// extraTextPatternCache caches compiled pattern sets per normalized,
	// sorted extra-key combination, so repeated RedactText calls with the
	// same extra keys do not recompile.
	extraTextPatternCache sync.Map // map[string]*textRedactPatterns
)
|
||||
|
||||
func RedactMap(input map[string]any, extraKeys ...string) map[string]any {
|
||||
if input == nil {
|
||||
return map[string]any{}
|
||||
}
|
||||
keys := buildKeySet(extraKeys)
|
||||
redacted, ok := redactValueWithDepth(input, keys, 0).(map[string]any)
|
||||
if !ok {
|
||||
return map[string]any{}
|
||||
}
|
||||
return redacted
|
||||
}
|
||||
|
||||
func RedactJSON(raw []byte, extraKeys ...string) string {
|
||||
if len(raw) == 0 {
|
||||
return ""
|
||||
}
|
||||
var value any
|
||||
if err := json.Unmarshal(raw, &value); err != nil {
|
||||
return "<non-json payload redacted>"
|
||||
}
|
||||
keys := buildKeySet(extraKeys)
|
||||
redacted := redactValueWithDepth(value, keys, 0)
|
||||
encoded, err := json.Marshal(redacted)
|
||||
if err != nil {
|
||||
return "<redacted>"
|
||||
}
|
||||
return string(encoded)
|
||||
}
|
||||
|
||||
// RedactText 对非结构化文本做轻量脱敏。
|
||||
//
|
||||
// 规则:
|
||||
// - 如果文本本身是 JSON,则按 RedactJSON 处理。
|
||||
// - 否则尝试对常见 key=value / key:"value" 片段做脱敏。
|
||||
//
|
||||
// 注意:该函数用于日志/错误信息兜底,不保证覆盖所有格式。
|
||||
func RedactText(input string, extraKeys ...string) string {
|
||||
input = strings.TrimSpace(input)
|
||||
if input == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
raw := []byte(input)
|
||||
if json.Valid(raw) {
|
||||
return RedactJSON(raw, extraKeys...)
|
||||
}
|
||||
|
||||
patterns := getTextRedactPatterns(extraKeys)
|
||||
|
||||
out := input
|
||||
out = reGOCSPX.ReplaceAllString(out, "GOCSPX-***")
|
||||
out = reAIza.ReplaceAllString(out, "AIza***")
|
||||
out = patterns.reJSONLike.ReplaceAllString(out, `$1***$3`)
|
||||
out = patterns.reQueryLike.ReplaceAllString(out, `$1=***`)
|
||||
out = patterns.rePlain.ReplaceAllString(out, `$1$2***`)
|
||||
return out
|
||||
}
|
||||
|
||||
func compileTextRedactPatterns(extraKeys []string) *textRedactPatterns {
|
||||
keyAlt := buildKeyAlternation(extraKeys)
|
||||
return &textRedactPatterns{
|
||||
// JSON-like: "access_token":"..."
|
||||
reJSONLike: regexp.MustCompile(`(?i)("(?:` + keyAlt + `)"\s*:\s*")([^"]*)(")`),
|
||||
// Query-like: access_token=...
|
||||
reQueryLike: regexp.MustCompile(`(?i)\b((?:` + keyAlt + `))=([^&\s]+)`),
|
||||
// Plain: access_token: ... / access_token = ...
|
||||
rePlain: regexp.MustCompile(`(?i)\b((?:` + keyAlt + `))\b(\s*[:=]\s*)([^,\s]+)`),
|
||||
}
|
||||
}
|
||||
|
||||
func getTextRedactPatterns(extraKeys []string) *textRedactPatterns {
|
||||
normalizedExtraKeys := normalizeAndSortExtraKeys(extraKeys)
|
||||
if len(normalizedExtraKeys) == 0 {
|
||||
return defaultTextRedactPatterns
|
||||
}
|
||||
|
||||
cacheKey := strings.Join(normalizedExtraKeys, ",")
|
||||
if cached, ok := extraTextPatternCache.Load(cacheKey); ok {
|
||||
if patterns, ok := cached.(*textRedactPatterns); ok {
|
||||
return patterns
|
||||
}
|
||||
}
|
||||
|
||||
compiled := compileTextRedactPatterns(normalizedExtraKeys)
|
||||
actual, _ := extraTextPatternCache.LoadOrStore(cacheKey, compiled)
|
||||
if patterns, ok := actual.(*textRedactPatterns); ok {
|
||||
return patterns
|
||||
}
|
||||
return compiled
|
||||
}
|
||||
|
||||
func normalizeAndSortExtraKeys(extraKeys []string) []string {
|
||||
if len(extraKeys) == 0 {
|
||||
return nil
|
||||
}
|
||||
seen := make(map[string]struct{}, len(extraKeys))
|
||||
keys := make([]string, 0, len(extraKeys))
|
||||
for _, key := range extraKeys {
|
||||
normalized := normalizeKey(key)
|
||||
if normalized == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[normalized]; ok {
|
||||
continue
|
||||
}
|
||||
seen[normalized] = struct{}{}
|
||||
keys = append(keys, normalized)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
return keys
|
||||
}
|
||||
|
||||
func buildKeyAlternation(extraKeys []string) string {
|
||||
seen := make(map[string]struct{}, len(defaultSensitiveKeyList)+len(extraKeys))
|
||||
keys := make([]string, 0, len(defaultSensitiveKeyList)+len(extraKeys))
|
||||
for _, k := range defaultSensitiveKeyList {
|
||||
seen[k] = struct{}{}
|
||||
keys = append(keys, regexp.QuoteMeta(k))
|
||||
}
|
||||
for _, k := range extraKeys {
|
||||
n := normalizeKey(k)
|
||||
if n == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[n]; ok {
|
||||
continue
|
||||
}
|
||||
seen[n] = struct{}{}
|
||||
keys = append(keys, regexp.QuoteMeta(n))
|
||||
}
|
||||
return strings.Join(keys, "|")
|
||||
}
|
||||
|
||||
func buildKeySet(extraKeys []string) map[string]struct{} {
|
||||
keys := make(map[string]struct{}, len(defaultSensitiveKeys)+len(extraKeys))
|
||||
for k := range defaultSensitiveKeys {
|
||||
keys[k] = struct{}{}
|
||||
}
|
||||
for _, key := range extraKeys {
|
||||
normalized := normalizeKey(key)
|
||||
if normalized == "" {
|
||||
continue
|
||||
}
|
||||
keys[normalized] = struct{}{}
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
func redactValueWithDepth(value any, keys map[string]struct{}, depth int) any {
|
||||
if depth > maxRedactDepth {
|
||||
return "<depth limit exceeded>"
|
||||
}
|
||||
|
||||
switch v := value.(type) {
|
||||
case map[string]any:
|
||||
out := make(map[string]any, len(v))
|
||||
for k, val := range v {
|
||||
if isSensitiveKey(k, keys) {
|
||||
out[k] = "***"
|
||||
continue
|
||||
}
|
||||
out[k] = redactValueWithDepth(val, keys, depth+1)
|
||||
}
|
||||
return out
|
||||
case []any:
|
||||
out := make([]any, len(v))
|
||||
for i, item := range v {
|
||||
out[i] = redactValueWithDepth(item, keys, depth+1)
|
||||
}
|
||||
return out
|
||||
default:
|
||||
return value
|
||||
}
|
||||
}
|
||||
|
||||
func isSensitiveKey(key string, keys map[string]struct{}) bool {
|
||||
_, ok := keys[normalizeKey(key)]
|
||||
return ok
|
||||
}
|
||||
|
||||
// normalizeKey trims surrounding whitespace and lowercases key so that
// lookups are case- and padding-insensitive.
func normalizeKey(key string) string {
	trimmed := strings.TrimSpace(key)
	return strings.ToLower(trimmed)
}
|
||||
84
internal/util/logredact/redact_test.go
Normal file
84
internal/util/logredact/redact_test.go
Normal file
@@ -0,0 +1,84 @@
|
||||
package logredact
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestRedactText_JSONLike(t *testing.T) {
|
||||
in := `{"access_token":"ya29.a0AfH6SMDUMMY","refresh_token":"1//0gDUMMY","other":"ok"}`
|
||||
out := RedactText(in)
|
||||
if out == in {
|
||||
t.Fatalf("expected redaction, got unchanged")
|
||||
}
|
||||
if want := `"access_token":"***"`; !strings.Contains(out, want) {
|
||||
t.Fatalf("expected %q in %q", want, out)
|
||||
}
|
||||
if want := `"refresh_token":"***"`; !strings.Contains(out, want) {
|
||||
t.Fatalf("expected %q in %q", want, out)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRedactText_QueryLike(t *testing.T) {
|
||||
in := "access_token=ya29.a0AfH6SMDUMMY refresh_token=1//0gDUMMY"
|
||||
out := RedactText(in)
|
||||
if strings.Contains(out, "ya29") || strings.Contains(out, "1//0") {
|
||||
t.Fatalf("expected tokens redacted, got %q", out)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRedactText_GOCSPX(t *testing.T) {
|
||||
in := "client_secret=GOCSPX-your-client-secret"
|
||||
out := RedactText(in)
|
||||
if strings.Contains(out, "your-client-secret") {
|
||||
t.Fatalf("expected secret redacted, got %q", out)
|
||||
}
|
||||
if !strings.Contains(out, "client_secret=***") {
|
||||
t.Fatalf("expected key redacted, got %q", out)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRedactText_ExtraKeyCacheUsesNormalizedSortedKey(t *testing.T) {
|
||||
clearExtraTextPatternCache()
|
||||
|
||||
out1 := RedactText("custom_secret=abc", "Custom_Secret", " custom_secret ")
|
||||
out2 := RedactText("custom_secret=xyz", "custom_secret")
|
||||
if !strings.Contains(out1, "custom_secret=***") {
|
||||
t.Fatalf("expected custom key redacted in first call, got %q", out1)
|
||||
}
|
||||
if !strings.Contains(out2, "custom_secret=***") {
|
||||
t.Fatalf("expected custom key redacted in second call, got %q", out2)
|
||||
}
|
||||
|
||||
if got := countExtraTextPatternCacheEntries(); got != 1 {
|
||||
t.Fatalf("expected 1 cached pattern set, got %d", got)
|
||||
}
|
||||
}
|
||||
|
||||
// TestRedactText_DefaultPathDoesNotUseExtraCache verifies that calls without
// extra keys use the precompiled default patterns and leave the extra-key
// cache untouched.
func TestRedactText_DefaultPathDoesNotUseExtraCache(t *testing.T) {
	clearExtraTextPatternCache()

	out := RedactText("access_token=abc")
	if !strings.Contains(out, "access_token=***") {
		t.Fatalf("expected default key redacted, got %q", out)
	}
	// No extra keys were supplied, so nothing should have been cached.
	if got := countExtraTextPatternCacheEntries(); got != 0 {
		t.Fatalf("expected extra cache to remain empty, got %d", got)
	}
}
|
||||
|
||||
func clearExtraTextPatternCache() {
|
||||
extraTextPatternCache.Range(func(key, value any) bool {
|
||||
extraTextPatternCache.Delete(key)
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
func countExtraTextPatternCacheEntries() int {
|
||||
count := 0
|
||||
extraTextPatternCache.Range(func(key, value any) bool {
|
||||
count++
|
||||
return true
|
||||
})
|
||||
return count
|
||||
}
|
||||
Reference in New Issue
Block a user