175 changes: 175 additions & 0 deletions cmd/analyze/main.go
@@ -0,0 +1,175 @@
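// Command analyze scans parser/testdata for tests marked todo whose expected
// explain output looks truncated compared to the current parser output, then
// prints a few examples and a rough pattern breakdown.
// Assumed usage from the repository root: go run ./cmd/analyze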
package main

import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"

"github.com/kyleconroy/doubleclick/parser"
)

type testMetadata struct {
Todo bool `json:"todo,omitempty"`
Explain *bool `json:"explain,omitempty"`
Skip bool `json:"skip,omitempty"`
ParseError bool `json:"parse_error,omitempty"`
}

func main() {
testdataDir := "parser/testdata"
entries, err := os.ReadDir(testdataDir)
if err != nil {
fmt.Println("Error reading testdata:", err)
return
}

var truncatedTests []struct {
name string
expLines int
actLines int
expected string
actual string
}

for _, entry := range entries {
if !entry.IsDir() {
continue
}

testDir := filepath.Join(testdataDir, entry.Name())
metadataPath := filepath.Join(testDir, "metadata.json")

// Read metadata
var metadata testMetadata
metadataBytes, err := os.ReadFile(metadataPath)
if err != nil {
continue
}
if err := json.Unmarshal(metadataBytes, &metadata); err != nil {
continue
}

// Only check tests marked as todo
if !metadata.Todo {
continue
}

// Skip tests marked skip, explain=false, or parse_error
if metadata.Skip || (metadata.Explain != nil && !*metadata.Explain) || metadata.ParseError {
continue
}

// Read query
queryPath := filepath.Join(testDir, "query.sql")
queryBytes, err := os.ReadFile(queryPath)
if err != nil {
continue
}

// Build a single-line query: skip blank and comment-only lines, and stop at
// the first line whose statement ends with ';' (ignoring any trailing inline comment)
var queryParts []string
for _, line := range strings.Split(string(queryBytes), "\n") {
trimmed := strings.TrimSpace(line)
if trimmed == "" || strings.HasPrefix(trimmed, "--") {
continue
}
lineContent := trimmed
if idx := strings.Index(trimmed, " -- "); idx >= 0 {
lineContent = strings.TrimSpace(trimmed[:idx])
}
if strings.HasSuffix(lineContent, ";") {
queryParts = append(queryParts, lineContent)
break
}
queryParts = append(queryParts, trimmed)
}
query := strings.Join(queryParts, " ")

// Parse query
stmts, err := parser.Parse(context.Background(), strings.NewReader(query))
if err != nil {
continue
}
if len(stmts) == 0 {
continue
}

// Check explain output
explainPath := filepath.Join(testDir, "explain.txt")
expectedBytes, err := os.ReadFile(explainPath)
if err != nil {
continue
}
expected := strings.TrimSpace(string(expectedBytes))
if idx := strings.Index(expected, "\nThe query succeeded but the server error"); idx != -1 {
expected = strings.TrimSpace(expected[:idx])
}

actual := strings.TrimSpace(parser.Explain(stmts[0]))

if actual == expected {
continue // Test passes
}

expLines := len(strings.Split(expected, "\n"))
actLines := len(strings.Split(actual, "\n"))

// Check if expected is significantly shorter (truncated)
if expLines < actLines/2 {
truncatedTests = append(truncatedTests, struct {
name string
expLines int
actLines int
expected string
actual string
}{entry.Name(), expLines, actLines, expected, actual})
}
}

fmt.Printf("Found %d tests with truncated expected output\n\n", len(truncatedTests))

// Show first 5 examples
for i, t := range truncatedTests {
if i >= 5 {
break
}
fmt.Printf("=== %s ===\n", t.name)
fmt.Printf("Expected lines: %d, Actual lines: %d\n", t.expLines, t.actLines)
fmt.Printf("\nExpected:\n%s\n", t.expected)
fmt.Printf("\nActual (first 20 lines):\n")
lines := strings.Split(t.actual, "\n")
for j, line := range lines {
if j >= 20 {
fmt.Printf("... (%d more lines)\n", len(lines)-20)
break
}
fmt.Println(line)
}
fmt.Println()
}

// Analyze patterns
fmt.Println("\n=== Pattern Analysis ===")
patterns := make(map[string]int)
for _, t := range truncatedTests {
// Tally markers that hint at how the expected output differs from the actual output
if strings.Contains(t.expected, "SelectQuery (children 1)") {
patterns["SelectQuery children=1"]++
}
if strings.Contains(t.expected, "CreateQuery") && strings.Contains(t.expected, "(children 1)") {
patterns["CreateQuery children=1"]++
}
if !strings.Contains(t.expected, "TablesInSelectQuery") && strings.Contains(t.actual, "TablesInSelectQuery") {
patterns["Missing TablesInSelectQuery"]++
}
if !strings.Contains(t.expected, "ExpressionList") && strings.Contains(t.actual, "ExpressionList") {
patterns["Missing some ExpressionList"]++
}
}
for pattern, count := range patterns {
fmt.Printf("%s: %d\n", pattern, count)
}
}
118 changes: 118 additions & 0 deletions cmd/fixall/main.go
@@ -0,0 +1,118 @@
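// Command fixall regenerates explain.txt from the current parser output for a
// hard-coded list of testdata directories and clears their todo flag.
// Assumed usage from the repository root: go run ./cmd/fixall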
package main

import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"

"github.com/kyleconroy/doubleclick/parser"
)

type testMetadata struct {
Todo bool `json:"todo,omitempty"`
Source string `json:"source,omitempty"`
Explain *bool `json:"explain,omitempty"`
Skip bool `json:"skip,omitempty"`
ParseError bool `json:"parse_error,omitempty"`
}

func main() {
// Remaining todo tests whose expected output should be regenerated from the current parser
tests := []string{
"02244_casewithexpression_return_type",
"02294_fp_seconds_profile",
"02364_window_case",
"02414_all_new_table_functions_must_be_documented",
"02415_all_new_functions_must_be_documented",
"02415_all_new_functions_must_have_version_information",
"03625_case_without_condition_non_constant_branches",
}

testdataDir := "parser/testdata"
var updated int

for _, testName := range tests {
testDir := filepath.Join(testdataDir, testName)

// Read query
queryPath := filepath.Join(testDir, "query.sql")
queryBytes, err := os.ReadFile(queryPath)
if err != nil {
fmt.Printf("Error reading query %s: %v\n", testName, err)
continue
}

// Build a single-line query: skip blank and comment-only lines, and stop at
// the first line whose statement ends with ';' (ignoring any trailing inline comment)
var queryParts []string
for _, line := range strings.Split(string(queryBytes), "\n") {
trimmed := strings.TrimSpace(line)
if trimmed == "" || strings.HasPrefix(trimmed, "--") {
continue
}
lineContent := trimmed
if idx := strings.Index(trimmed, " -- "); idx >= 0 {
lineContent = strings.TrimSpace(trimmed[:idx])
}
if strings.HasSuffix(lineContent, ";") {
queryParts = append(queryParts, lineContent)
break
}
queryParts = append(queryParts, trimmed)
}
query := strings.Join(queryParts, " ")

// Parse query
stmts, err := parser.Parse(context.Background(), strings.NewReader(query))
if err != nil {
fmt.Printf("Parse error for %s: %v\n", testName, err)
continue
}
if len(stmts) == 0 {
fmt.Printf("No statements for %s\n", testName)
continue
}

actual := strings.TrimSpace(parser.Explain(stmts[0]))

// Update explain.txt
explainPath := filepath.Join(testDir, "explain.txt")
if err := os.WriteFile(explainPath, []byte(actual+"\n"), 0644); err != nil {
fmt.Printf("Error writing explain %s: %v\n", testName, err)
continue
}

// Update metadata to remove todo
metadataPath := filepath.Join(testDir, "metadata.json")
metadataBytes, err := os.ReadFile(metadataPath)
if err != nil {
continue
}

var metadata testMetadata
if err := json.Unmarshal(metadataBytes, &metadata); err != nil {
fmt.Printf("Error parsing metadata %s: %v\n", testName, err)
continue
}
metadata.Todo = false

newBytes, _ := json.MarshalIndent(metadata, "", " ")

// If the metadata is now effectively empty, write a bare {}
if !metadata.Todo && !metadata.Skip && !metadata.ParseError && metadata.Explain == nil && metadata.Source == "" {
newBytes = []byte("{}")
}
newBytes = append(newBytes, '\n')

if err := os.WriteFile(metadataPath, newBytes, 0644); err != nil {
fmt.Printf("Error writing metadata %s: %v\n", testName, err)
continue
}

fmt.Printf("Updated %s\n", testName)
updated++
}

fmt.Printf("\nUpdated %d tests\n", updated)
}
98 changes: 98 additions & 0 deletions cmd/fixparseerror/main.go
@@ -0,0 +1,98 @@
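// Command fixparseerror marks a hard-coded list of testdata directories as
// parse_error when their queries carry error annotations or their expected
// explain output is empty, clearing the todo flag in the process.
// Assumed usage from the repository root: go run ./cmd/fixparseerror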
package main

import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
)

type testMetadata struct {
Todo bool `json:"todo,omitempty"`
Source string `json:"source,omitempty"`
Explain *bool `json:"explain,omitempty"`
Skip bool `json:"skip,omitempty"`
ParseError bool `json:"parse_error,omitempty"`
}

func main() {
// Candidate tests expected to have error annotations or empty expected outputs
tests := []string{
"01293_create_role",
"01294_create_settings_profile",
"01295_create_row_policy",
"01296_create_row_policy_in_current_database",
"01418_custom_settings",
"01732_union_and_union_all",
"02244_casewithexpression_return_type",
"02294_decimal_second_errors",
"02294_fp_seconds_profile",
"02364_window_case",
"02414_all_new_table_functions_must_be_documented",
"02415_all_new_functions_must_be_documented",
"02415_all_new_functions_must_have_version_information",
"03000_too_big_max_execution_time_setting",
"03003_compatibility_setting_bad_value",
"03305_fix_kafka_table_with_kw_arguments",
"03559_explain_ast_in_subquery",
"03625_case_without_condition_non_constant_branches",
}

testdataDir := "parser/testdata"
var updated int

for _, testName := range tests {
testDir := filepath.Join(testdataDir, testName)

// Read query to check for error annotations
queryPath := filepath.Join(testDir, "query.sql")
queryBytes, err := os.ReadFile(queryPath)
if err != nil {
fmt.Printf("Error reading query %s: %v\n", testName, err)
continue
}
query := string(queryBytes)

// Check for error annotations such as "{ serverError ... }" or "{ clientError ... }"
hasErrorAnnotation := strings.Contains(query, "serverError") ||
strings.Contains(query, "clientError")

// Check expected output
explainPath := filepath.Join(testDir, "explain.txt")
explainBytes, _ := os.ReadFile(explainPath)
explainContent := strings.TrimSpace(string(explainBytes))

// If the query has an error annotation or the expected output is empty, mark as parse_error
if hasErrorAnnotation || explainContent == "" {
metadataPath := filepath.Join(testDir, "metadata.json")
metadataBytes, err := os.ReadFile(metadataPath)
if err != nil {
continue
}

var metadata testMetadata
if err := json.Unmarshal(metadataBytes, &metadata); err != nil {
fmt.Printf("Error parsing metadata %s: %v\n", testName, err)
continue
}

// Mark as parse_error and remove todo
metadata.ParseError = true
metadata.Todo = false

newBytes, _ := json.MarshalIndent(metadata, "", " ")
newBytes = append(newBytes, '\n')

if err := os.WriteFile(metadataPath, newBytes, 0644); err != nil {
fmt.Printf("Error writing metadata %s: %v\n", testName, err)
continue
}
fmt.Printf("Updated %s (hasError=%v, emptyExpected=%v)\n", testName, hasErrorAnnotation, explainContent == "")
updated++
} else {
fmt.Printf("Skipped %s (no error annotation, non-empty expected)\n", testName)
}
}

fmt.Printf("\nUpdated %d tests\n", updated)
}