diff --git a/AGENTS.md b/AGENTS.md
index 9050f1c..552c921 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -33,6 +33,7 @@ mise exec -- npx playwright test e2e/datetime.spec.mjs
mise exec -- npx playwright test e2e/levelless.spec.mjs
mise exec -- npx playwright test e2e/copy.spec.mjs
mise exec -- npx playwright test e2e/ui-prefs.spec.mjs
+mise exec -- npx playwright test e2e/lucene-query.spec.mjs
# Manual test log generation
mise exec -- node e2e/loggen.mjs --count 200
@@ -72,6 +73,7 @@ e2e/datetime.spec.mjs Datetime range picker UI and API integration
e2e/levelless.spec.mjs Levelless log entries rendering and filtering
e2e/copy.spec.mjs Row copy button and field-value click-to-filter
e2e/ui-prefs.spec.mjs Persistent UI preferences (columns, widths, time preset, reset)
+e2e/lucene-query.spec.mjs Lucene query features: existence (field:*), regex, FTS, +/-, wildcard
e2e/screenshot.mjs Screenshot generator with realistic data
e2e/loggen.mjs Manual test-data log generator (json/logfmt/mixed)
.github/workflows/ci-build-test.yml CI pipeline (build, vet, unit tests, E2E tests)
diff --git a/e2e/helpers.mjs b/e2e/helpers.mjs
index 6328566..bb99e20 100644
--- a/e2e/helpers.mjs
+++ b/e2e/helpers.mjs
@@ -19,6 +19,7 @@ const FILE_PORT_OFFSETS = Object.freeze({
'datetime.spec.mjs': 0,
'field-filter-append.spec.mjs': 1,
'levelless.spec.mjs': 2,
+ 'lucene-query.spec.mjs': 10,
'resize.spec.mjs': 3,
'search-caret.spec.mjs': 4,
'search.spec.mjs': 5,
diff --git a/e2e/lucene-query.spec.mjs b/e2e/lucene-query.spec.mjs
new file mode 100644
index 0000000..04e54ca
--- /dev/null
+++ b/e2e/lucene-query.spec.mjs
@@ -0,0 +1,163 @@
+/**
+ * lucene-query.spec.mjs — Lucene-style query feature tests.
+ *
+ * Covers: field existence (field:*), regex (field:/regex/), FTS (bare keyword
+ * and quoted phrase), required/prohibited (+/-), wildcards, and UI highlighting.
+ */
+
+import { test, expect } from '@playwright/test';
+import { portForTestFile, startServer, stopServer, postJSON } from './helpers.mjs';
+
+let server;
+let baseURL;
+
+/** Logs with varied fields and messages for query testing. */
+const TEST_LINES = [
+ // logs with request_id
+ JSON.stringify({ level: 'INFO', msg: 'connection timeout', time: '2026-02-18T10:01:00Z', service: 'api-gateway', request_id: 'req-001' }),
+ JSON.stringify({ level: 'INFO', msg: 'connection refused', time: '2026-02-18T10:02:00Z', service: 'api-edge', request_id: 'req-002' }),
+ // logs without request_id
+ JSON.stringify({ level: 'WARN', msg: 'all good', time: '2026-02-18T10:03:00Z', service: 'auth-service' }),
+ JSON.stringify({ level: 'ERROR', msg: 'internal error', time: '2026-02-18T10:04:00Z', service: 'auth-service' }),
+ JSON.stringify({ level: 'ERROR', msg: 'gateway error', time: '2026-02-18T10:05:00Z', service: 'api-gateway', request_id: 'req-005' }),
+ JSON.stringify({ level: 'DEBUG', msg: 'debug trace', time: '2026-02-18T10:06:00Z', service: 'api-edge' }),
+];
+
+test.describe('lucene-query', () => {
+ test.beforeAll(async ({}, workerInfo) => {
+ const port = portForTestFile(workerInfo);
+ server = await startServer(port, { lines: TEST_LINES });
+ baseURL = `http://localhost:${port}`;
+ });
+
+ test.afterAll(async () => {
+ await stopServer(server);
+ });
+
+ test('field existence: request_id:* returns only logs with that field', async ({ page }) => {
+ await page.goto(baseURL);
+ const result = await postJSON(page, '/query', { query: 'request_id:*', limit: 100 });
+ expect(result.status).toBe(200);
+ const logs = result.body.logs;
+ expect(logs.length).toBeGreaterThan(0);
+ // Every returned log must have request_id
+ for (const log of logs) {
+ expect(log.fields).toHaveProperty('request_id');
+ }
+ // Logs without request_id must not appear
+ const withoutRequestId = logs.filter((l) => !l.fields?.request_id);
+ expect(withoutRequestId).toHaveLength(0);
+ });
+
+ test('field existence: non-existent field returns no results', async ({ page }) => {
+ await page.goto(baseURL);
+ const result = await postJSON(page, '/query', { query: 'nonexistent_field:*', limit: 100 });
+ expect(result.status).toBe(200);
+ expect(result.body.logs).toHaveLength(0);
+ });
+
+ test('regex: service:/^api-(gateway|edge)$/ matches only api services', async ({ page }) => {
+ await page.goto(baseURL);
+ const result = await postJSON(page, '/query', {
+ query: 'service:/^api-(gateway|edge)$/',
+ limit: 100,
+ });
+ expect(result.status).toBe(200);
+ const logs = result.body.logs;
+ expect(logs.length).toBeGreaterThan(0);
+ for (const log of logs) {
+ expect(['api-gateway', 'api-edge']).toContain(log.fields?.service);
+ }
+ // auth-service must not appear
+ const authLogs = logs.filter((l) => l.fields?.service === 'auth-service');
+ expect(authLogs).toHaveLength(0);
+ });
+
+ test('FTS: bare keyword "timeout" matches message containing word', async ({ page }) => {
+ await page.goto(baseURL);
+ const result = await postJSON(page, '/query', { query: 'timeout', limit: 100 });
+ expect(result.status).toBe(200);
+ const logs = result.body.logs;
+ expect(logs.length).toBeGreaterThan(0);
+ for (const log of logs) {
+ expect(log.message.toLowerCase()).toContain('timeout');
+ }
+ });
+
+ test('FTS: quoted phrase "connection refused" matches the exact phrase', async ({ page }) => {
+ await page.goto(baseURL);
+ const result = await postJSON(page, '/query', {
+ query: '"connection refused"',
+ limit: 100,
+ });
+ expect(result.status).toBe(200);
+ const logs = result.body.logs;
+ expect(logs.length).toBeGreaterThan(0);
+ for (const log of logs) {
+ expect(log.message.toLowerCase()).toContain('connection refused');
+ }
+ // "connection timeout" must NOT appear
+ const nonMatchingLogs = logs.filter((l) => l.message.toLowerCase().includes('timeout') && !l.message.toLowerCase().includes('refused'));
+ expect(nonMatchingLogs).toHaveLength(0);
+ });
+
+ test('required/prohibited: +level:ERROR -service:auth returns only non-auth ERRORs', async ({ page }) => {
+ await page.goto(baseURL);
+ const result = await postJSON(page, '/query', {
+ query: '+level:ERROR -service:auth',
+ limit: 100,
+ });
+ expect(result.status).toBe(200);
+ const logs = result.body.logs;
+ expect(logs.length).toBeGreaterThan(0);
+ for (const log of logs) {
+ expect(log.level).toBe('ERROR');
+ expect(log.fields?.service).not.toContain('auth');
+ }
+ });
+
+ test('wildcard: service:api* matches all api-prefixed services', async ({ page }) => {
+ await page.goto(baseURL);
+ const result = await postJSON(page, '/query', { query: 'service:api*', limit: 100 });
+ expect(result.status).toBe(200);
+ const logs = result.body.logs;
+ expect(logs.length).toBeGreaterThan(0);
+ for (const log of logs) {
+ expect(log.fields?.service).toMatch(/^api/);
+ }
+ // auth-service must not appear
+ const authLogs = logs.filter((l) => l.fields?.service === 'auth-service');
+ expect(authLogs).toHaveLength(0);
+ });
+
+ test('UI: regex and +/- queries highlighted correctly', async ({ page }) => {
+ await page.goto(baseURL);
+ const searchInput = page.locator('.search-editor-input');
+ await expect(searchInput).toBeVisible();
+
+ // Regex literal should get hl-regex class
+ await searchInput.fill('service:/^api-(gateway|edge)$/');
+ await page.waitForTimeout(100);
+ const hasRegexHighlight = await page.evaluate(
+ () => !!document.querySelector('.search-highlight .hl-regex'),
+ );
+ expect(hasRegexHighlight).toBeTruthy();
+
+ // +/- prefix operators should get hl-op class
+ await searchInput.fill('+level:ERROR -service:auth');
+ await page.waitForTimeout(100);
+ const opSpans = await page.evaluate(
+ () => Array.from(document.querySelectorAll('.search-highlight .hl-op')).map((e) => e.textContent),
+ );
+ expect(opSpans).toContain('+');
+ expect(opSpans).toContain('-');
+
+ // ? wildcard should get hl-wildcard class
+ await searchInput.fill('service:api-?');
+ await page.waitForTimeout(100);
+ const hasWildcardHighlight = await page.evaluate(
+ () => !!document.querySelector('.search-highlight .hl-wildcard'),
+ );
+ expect(hasWildcardHighlight).toBeTruthy();
+ });
+});
diff --git a/pkg/query/lucene.go b/pkg/query/lucene.go
index 6e61d3c..6a6c43a 100644
--- a/pkg/query/lucene.go
+++ b/pkg/query/lucene.go
@@ -146,6 +146,22 @@ func (p *parser) parsePrimary() (Filter, error) {
return filter, nil
}
+ // Handle required (+) prefix — Lucene semantics: clause is required (same as default AND).
+ if p.peekChar('+') {
+ p.consume(1)
+ return p.parsePrimary()
+ }
+
+ // Handle prohibited (-) prefix — Lucene semantics: clause must NOT match.
+ if p.peekChar('-') {
+ p.consume(1)
+ filter, err := p.parsePrimary()
+ if err != nil {
+ return nil, err
+ }
+ return &NotFilter{Filter: filter}, nil
+ }
+
// Parse field:value or keyword
token := p.readToken()
if token == "" {
@@ -163,24 +179,88 @@ func (p *parser) parsePrimary() (Filter, error) {
return p.parseRange(field, value)
}
- // Handle quoted strings
+ // Handle quoted strings (phrase match)
if strings.HasPrefix(value, "\"") {
value = strings.Trim(value, "\"")
return &FieldFilter{Field: field, Value: value, Exact: true}, nil
}
- // Handle wildcards
- if strings.Contains(value, "*") {
+ // Handle regex values: field:/regex/
+ if strings.HasPrefix(value, "/") {
+ regexStr := p.extractRegex(value)
+ re, err := regexp.Compile(regexStr)
+ if err != nil {
+ return nil, fmt.Errorf("invalid regex: %w", err)
+ }
+ return &RegexFilter{Field: field, Regex: re}, nil
+ }
+
+ // Strip boost suffix (^n) — accepted but ignored for filtering
+ value = stripBoost(value)
+
+ // Handle existence: field:*
+ if value == "*" {
+ return &ExistenceFilter{Field: field}, nil
+ }
+
+ // Handle wildcards (* and ?)
+ if strings.ContainsAny(value, "*?") {
return &WildcardFilter{Field: field, Pattern: value}, nil
}
return &FieldFilter{Field: field, Value: value, Exact: false}, nil
}
+ // Strip boost from bare keyword
+ token = stripBoost(token)
+ if token == "" {
+ return &AllFilter{}, nil
+ }
+
+ // Bare quoted phrase — search message and fields for the phrase
+ if len(token) >= 2 && token[0] == '"' && token[len(token)-1] == '"' {
+ return &KeywordFilter{Keyword: token[1 : len(token)-1]}, nil
+ }
+
// Keyword search (searches message and fields)
return &KeywordFilter{Keyword: token}, nil
}
+// extractRegex extracts the regex string from a value that starts with "/".
+// If the value already ends with "/" (complete token), the content between
+// the slashes is returned. Otherwise, additional characters are consumed
+// from the parser input until the closing "/" is found — this handles regex
+// patterns that were cut short by "(" or ")" in the token reader.
+func (p *parser) extractRegex(value string) string {
+ // Complete regex already captured (e.g. "/regex/")
+ if len(value) >= 2 && value[len(value)-1] == '/' {
+ return value[1 : len(value)-1]
+ }
+ // Partial — strip leading "/" and continue reading until closing "/"
+ regexStr := value[1:]
+ for p.pos < len(p.input) {
+ ch := p.input[p.pos]
+ if ch == '/' {
+ p.pos++ // consume closing /
+ break
+ }
+ regexStr += string(ch)
+ p.pos++
+ }
+ return regexStr
+}
+
+// stripBoost removes a trailing "^number" boosting suffix from a token.
+// Boosting is accepted for query-string compatibility but ignored for filtering.
+func stripBoost(s string) string {
+ if idx := strings.LastIndex(s, "^"); idx > 0 {
+ if _, err := strconv.ParseFloat(s[idx+1:], 64); err == nil {
+ return s[:idx]
+ }
+ }
+ return s
+}
+
func (p *parser) parseRange(field, rangeStr string) (Filter, error) {
// Range format: [start TO end]
rangeStr = strings.TrimPrefix(rangeStr, "[")
@@ -428,7 +508,7 @@ func (f *KeywordFilter) Match(entry *storage.LogEntry) bool {
return false
}
-// WildcardFilter matches field values with wildcards
+// WildcardFilter matches field values with wildcards (* and ?)
type WildcardFilter struct {
Field string
Pattern string
@@ -450,13 +530,58 @@ func (f *WildcardFilter) Match(entry *storage.LogEntry) bool {
}
}
- // Convert wildcard pattern to regex
+ // Convert wildcard pattern to regex (* → .*, ? → .)
pattern := strings.ReplaceAll(f.Pattern, "*", ".*")
+ pattern = strings.ReplaceAll(pattern, "?", ".")
pattern = "^" + pattern + "$"
matched, _ := regexp.MatchString("(?i)"+pattern, value)
return matched
}
+// ExistenceFilter matches entries where the specified field is present.
+// For built-in fields (level, message) it matches when the value is non-empty.
+// For custom fields it matches when the key exists in the entry's Fields map.
+type ExistenceFilter struct {
+ Field string
+}
+
+func (f *ExistenceFilter) Match(entry *storage.LogEntry) bool {
+ switch f.Field {
+ case "level":
+ return entry.Level != ""
+ case "message":
+ return entry.Message != ""
+ default:
+ _, ok := entry.Fields[f.Field]
+ return ok
+ }
+}
+
+// RegexFilter matches entries where the field value matches the given regular expression.
+type RegexFilter struct {
+ Field string
+ Regex *regexp.Regexp
+}
+
+func (f *RegexFilter) Match(entry *storage.LogEntry) bool {
+ var value string
+
+ switch f.Field {
+ case "level":
+ value = entry.Level
+ case "message":
+ value = entry.Message
+ default:
+ if v, ok := entry.Fields[f.Field]; ok {
+ value = fmt.Sprintf("%v", v)
+ } else {
+ return false
+ }
+ }
+
+ return f.Regex.MatchString(value)
+}
+
// TimestampRangeFilter filters by timestamp range
type TimestampRangeFilter struct {
Start time.Time
diff --git a/pkg/query/lucene_test.go b/pkg/query/lucene_test.go
index 5749f4f..efc1744 100644
--- a/pkg/query/lucene_test.go
+++ b/pkg/query/lucene_test.go
@@ -1,6 +1,7 @@
package query
import (
+ "regexp"
"testing"
"time"
@@ -23,6 +24,15 @@ func TestParse(t *testing.T) {
{"complex query", "(level:ERROR OR level:WARN) AND service:api", false},
{"wildcard field", "message:*timeout*", false},
{"quoted string", `message:"connection refused"`, false},
+ {"existence query", "request_id:*", false},
+ {"regex query", `service:/^api-gateway$/`, false},
+ {"regex with alternation", `service:/^api-(gateway|edge)$/`, false},
+ {"required prefix", "+level:ERROR", false},
+ {"prohibited prefix", "-level:DEBUG", false},
+ {"boost syntax", "error^2", false},
+ {"field boost syntax", "level:ERROR^3", false},
+ {"question mark wildcard", "service:api-?", false},
+ {"bare quoted phrase", `"connection refused"`, false},
}
for _, tt := range tests {
@@ -654,3 +664,372 @@ func TestAllFilter(t *testing.T) {
t.Error("AllFilter should match all entries")
}
}
+
+func TestExistenceFilter(t *testing.T) {
+ tests := []struct {
+ name string
+ filter *ExistenceFilter
+ entry *storage.LogEntry
+ want bool
+ }{
+ {
+ name: "field present in Fields map",
+ filter: &ExistenceFilter{Field: "request_id"},
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"request_id": "req-001"}},
+ want: true,
+ },
+ {
+ name: "field absent from Fields map",
+ filter: &ExistenceFilter{Field: "request_id"},
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{}},
+ want: false,
+ },
+ {
+ name: "level field present (non-empty)",
+ filter: &ExistenceFilter{Field: "level"},
+ entry: &storage.LogEntry{Level: "ERROR", Message: "test", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ {
+ name: "level field absent (empty)",
+ filter: &ExistenceFilter{Field: "level"},
+ entry: &storage.LogEntry{Level: "", Message: "test", Fields: map[string]interface{}{}},
+ want: false,
+ },
+ {
+ name: "message field present (non-empty)",
+ filter: &ExistenceFilter{Field: "message"},
+ entry: &storage.LogEntry{Level: "INFO", Message: "hello", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ {
+ name: "message field absent (empty)",
+ filter: &ExistenceFilter{Field: "message"},
+ entry: &storage.LogEntry{Level: "INFO", Message: "", Fields: map[string]interface{}{}},
+ want: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := tt.filter.Match(tt.entry); got != tt.want {
+ t.Errorf("ExistenceFilter.Match() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestRegexFilter(t *testing.T) {
+ tests := []struct {
+ name string
+ filter *RegexFilter
+ entry *storage.LogEntry
+ want bool
+ wantErr bool
+ }{
+ {
+ name: "regex matches service field",
+ filter: &RegexFilter{Field: "service", Regex: regexp.MustCompile(`^api-(gateway|edge)$`)},
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"service": "api-gateway"}},
+ want: true,
+ },
+ {
+ name: "regex no match on service",
+ filter: &RegexFilter{Field: "service", Regex: regexp.MustCompile(`^api-(gateway|edge)$`)},
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"service": "auth-service"}},
+ want: false,
+ },
+ {
+ name: "regex matches level field",
+ filter: &RegexFilter{Field: "level", Regex: regexp.MustCompile(`^(ERROR|WARN)$`)},
+ entry: &storage.LogEntry{Level: "ERROR", Message: "test", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ {
+ name: "regex missing field returns false",
+ filter: &RegexFilter{Field: "missing", Regex: regexp.MustCompile(`.*`)},
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{}},
+ want: false,
+ },
+ {
+ name: "regex matches message field",
+ filter: &RegexFilter{Field: "message", Regex: regexp.MustCompile(`timeout`)},
+ entry: &storage.LogEntry{Level: "ERROR", Message: "connection timeout", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := tt.filter.Match(tt.entry); got != tt.want {
+ t.Errorf("RegexFilter.Match() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestParsePrefixOperators(t *testing.T) {
+ tests := []struct {
+ name string
+ query string
+ entry *storage.LogEntry
+ want bool
+ }{
+ {
+ name: "+ required prefix matches",
+ query: "+level:ERROR",
+ entry: &storage.LogEntry{Level: "ERROR", Message: "test", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ {
+ name: "+ required prefix no match",
+ query: "+level:ERROR",
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{}},
+ want: false,
+ },
+ {
+ name: "- prohibited prefix excludes matching entry",
+ query: "-level:DEBUG",
+ entry: &storage.LogEntry{Level: "DEBUG", Message: "test", Fields: map[string]interface{}{}},
+ want: false,
+ },
+ {
+ name: "- prohibited prefix passes non-matching entry",
+ query: "-level:DEBUG",
+ entry: &storage.LogEntry{Level: "ERROR", Message: "test", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ {
+ name: "+required -prohibited combined",
+ query: "+level:ERROR -service:auth",
+ entry: &storage.LogEntry{Level: "ERROR", Message: "test", Fields: map[string]interface{}{"service": "api"}},
+ want: true,
+ },
+ {
+ name: "+required -prohibited excludes prohibited service",
+ query: "+level:ERROR -service:auth",
+ entry: &storage.LogEntry{Level: "ERROR", Message: "test", Fields: map[string]interface{}{"service": "auth"}},
+ want: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ q, err := Parse(tt.query)
+ if err != nil {
+ t.Fatalf("Parse() error = %v", err)
+ }
+ if got := q.Match(tt.entry); got != tt.want {
+ t.Errorf("Query.Match() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestParseRegexQuery(t *testing.T) {
+ tests := []struct {
+ name string
+ query string
+ entry *storage.LogEntry
+ want bool
+ }{
+ {
+ name: "simple regex matches",
+ query: `service:/^api-gateway$/`,
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"service": "api-gateway"}},
+ want: true,
+ },
+ {
+ name: "simple regex no match",
+ query: `service:/^api-gateway$/`,
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"service": "auth-service"}},
+ want: false,
+ },
+ {
+ name: "regex with alternation",
+ query: `service:/^api-(gateway|edge)$/`,
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"service": "api-edge"}},
+ want: true,
+ },
+ {
+ name: "regex alternation no match",
+ query: `service:/^api-(gateway|edge)$/`,
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"service": "api-other"}},
+ want: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ q, err := Parse(tt.query)
+ if err != nil {
+ t.Fatalf("Parse(%q) error = %v", tt.query, err)
+ }
+ if got := q.Match(tt.entry); got != tt.want {
+ t.Errorf("Query.Match() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestParseExistenceQuery(t *testing.T) {
+ tests := []struct {
+ name string
+ query string
+ entry *storage.LogEntry
+ want bool
+ }{
+ {
+ name: "field:* matches when field present",
+ query: "request_id:*",
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"request_id": "req-001"}},
+ want: true,
+ },
+ {
+ name: "field:* no match when field absent",
+ query: "request_id:*",
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{}},
+ want: false,
+ },
+ {
+ name: "level:* matches when level set",
+ query: "level:*",
+ entry: &storage.LogEntry{Level: "ERROR", Message: "test", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ {
+ name: "level:* no match when level empty",
+ query: "level:*",
+ entry: &storage.LogEntry{Level: "", Message: "test", Fields: map[string]interface{}{}},
+ want: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ q, err := Parse(tt.query)
+ if err != nil {
+ t.Fatalf("Parse(%q) error = %v", tt.query, err)
+ }
+ if got := q.Match(tt.entry); got != tt.want {
+ t.Errorf("Query.Match() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestParseBoostSyntax(t *testing.T) {
+ tests := []struct {
+ name string
+ query string
+ entry *storage.LogEntry
+ want bool
+ }{
+ {
+ name: "keyword with boost still matches",
+ query: "timeout^2",
+ entry: &storage.LogEntry{Level: "ERROR", Message: "connection timeout", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ {
+ name: "field value with boost still matches",
+ query: "level:ERROR^3",
+ entry: &storage.LogEntry{Level: "ERROR", Message: "test", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ {
+ name: "boost accepted - non matching entry still excluded",
+ query: "level:ERROR^3",
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{}},
+ want: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ q, err := Parse(tt.query)
+ if err != nil {
+ t.Fatalf("Parse(%q) error = %v", tt.query, err)
+ }
+ if got := q.Match(tt.entry); got != tt.want {
+ t.Errorf("Query.Match() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestParseQuotedPhraseKeyword(t *testing.T) {
+ tests := []struct {
+ name string
+ query string
+ entry *storage.LogEntry
+ want bool
+ }{
+ {
+ name: "bare quoted phrase matches in message",
+ query: `"connection refused"`,
+ entry: &storage.LogEntry{Level: "ERROR", Message: "connection refused", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ {
+ name: "bare quoted phrase substring match",
+ query: `"connection"`,
+ entry: &storage.LogEntry{Level: "ERROR", Message: "connection timeout", Fields: map[string]interface{}{}},
+ want: true,
+ },
+ {
+ name: "bare quoted phrase no match",
+ query: `"connection refused"`,
+ entry: &storage.LogEntry{Level: "ERROR", Message: "all good", Fields: map[string]interface{}{}},
+ want: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ q, err := Parse(tt.query)
+ if err != nil {
+ t.Fatalf("Parse(%q) error = %v", tt.query, err)
+ }
+ if got := q.Match(tt.entry); got != tt.want {
+ t.Errorf("Query.Match() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestWildcardFilterQuestionMark(t *testing.T) {
+ tests := []struct {
+ name string
+ filter *WildcardFilter
+ entry *storage.LogEntry
+ want bool
+ }{
+ {
+ name: "? matches single char",
+ filter: &WildcardFilter{Field: "service", Pattern: "api-????way"},
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"service": "api-gateway"}},
+ want: true,
+ },
+ {
+ name: "? no match when extra chars",
+ filter: &WildcardFilter{Field: "service", Pattern: "api-?"},
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"service": "api-gateway"}},
+ want: false,
+ },
+ {
+ name: "? combined with * wildcard",
+ filter: &WildcardFilter{Field: "service", Pattern: "api-?*"},
+ entry: &storage.LogEntry{Level: "INFO", Message: "test", Fields: map[string]interface{}{"service": "api-gateway"}},
+ want: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := tt.filter.Match(tt.entry); got != tt.want {
+ t.Errorf("WildcardFilter.Match() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/pkg/server/index.html b/pkg/server/index.html
index b5b4fe7..e701c55 100644
--- a/pkg/server/index.html
+++ b/pkg/server/index.html
@@ -32,6 +32,7 @@
--peek-red: #ef4444;
--peek-amber: #f59e0b;
--peek-blue: #38bdf8;
+ --peek-purple: #a78bfa;
--peek-gray: #6b7280;
--peek-dim: #4b5060;
@@ -150,6 +151,7 @@
--peek-red: #ef4444;
--peek-amber: #d97706;
--peek-blue: #2563eb;
+ --peek-purple: #7c3aed;
--peek-gray: #9ca3af;
--peek-dim: #d1d5db;
@@ -420,6 +422,7 @@
.hl-quote { color: var(--peek-green); }
.hl-wildcard { color: var(--peek-amber); font-style: italic; }
.hl-range { color: var(--peek-green); }
+ .hl-regex { color: var(--peek-purple); }
.hl-paren { color: var(--muted-foreground); }
.hl-error { color: var(--peek-red); text-decoration: underline wavy var(--peek-red); }
@@ -1768,17 +1771,17 @@
// Lucene syntax tokenizer & highlighter
// ──────────────────────────────────────────
- // Sub-tokenize a value string for wildcard (*) spans
+ // Sub-tokenize a value string for wildcard (* and ?) spans
function tokenizeValue(val) {
const parts = []
let i = 0
while (i < val.length) {
let j = i
- while (j < val.length && val[j] !== '*') j++
+ while (j < val.length && val[j] !== '*' && val[j] !== '?') j++
if (j > i) parts.push({type: 'value', text: val.slice(i, j)})
if (j < val.length) {
let k = j
- while (k < val.length && val[k] === '*') k++
+ while (k < val.length && (val[k] === '*' || val[k] === '?')) k++
parts.push({type: 'wildcard', text: val.slice(j, k)})
j = k
}
@@ -1857,8 +1860,28 @@
}
// Word token (field:value, operator, or bare value)
+ // Special handling: when a regex literal is present (field:/regex/),
+ // the word reader must not stop at ( or ) inside the regex.
let j = i
- while (j < text.length && !/[\s"()\[\]{}]/.test(text[j])) j++
+ let inRegex = false // true once we have consumed the opening "/"
+ while (j < text.length) {
+ const c = text[j]
+ if (inRegex) {
+ // Inside regex: consume everything until the closing "/"
+ if (c === '/') { j++; inRegex = false; break }
+ j++
+ } else {
+ // Normal word: stop at whitespace or grouping chars
+ if (/[\s"()\[\]{}]/.test(c)) break
+ // Detect field:/ transition; consume both chars and enter regex mode
+ if (c === ':' && j + 1 < text.length && text[j + 1] === '/') {
+ inRegex = true
+ j += 2 // consume :/
+ } else {
+ j++
+ }
+ }
+ }
const word = text.slice(i, j)
i = j
if (!word) { i++; continue }
@@ -1868,14 +1891,30 @@
} else {
const colonIdx = word.indexOf(':')
if (colonIdx > 0) {
- tokens.push({type: 'field', text: word.slice(0, colonIdx)})
+ // Handle +/- required/prohibited prefix operators
+ let fieldPart = word.slice(0, colonIdx)
+ if (fieldPart[0] === '+' || fieldPart[0] === '-') {
+ tokens.push({type: 'op', text: fieldPart[0]})
+ fieldPart = fieldPart.slice(1)
+ }
+ if (fieldPart) tokens.push({type: 'field', text: fieldPart})
tokens.push({type: 'colon', text: ':'})
const val = word.slice(colonIdx + 1)
- if (val) {
+ if (!val) {
+ // nothing after colon
+ } else if (val.startsWith('/')) {
+ tokens.push({type: 'regex', text: val})
+ } else {
for (const t of tokenizeValue(val)) tokens.push(t)
}
} else {
- for (const t of tokenizeValue(word)) tokens.push(t)
+ // Handle +/- prefix for bare keywords
+ let bareWord = word
+ if (bareWord[0] === '+' || bareWord[0] === '-') {
+ tokens.push({type: 'op', text: bareWord[0]})
+ bareWord = bareWord.slice(1)
+ }
+ for (const t of tokenizeValue(bareWord)) tokens.push(t)
}
}
}
@@ -1906,6 +1945,7 @@
case 'quote': return `${e}`
case 'wildcard': return `${e}`
case 'range': return `${e}`
+ case 'regex': return `${e}`
case 'paren': return `${e}`
case 'error': return `${e}`
default: return e