feat: add query parser library (#73)
* feat: add query parser library

* feat: return error if invalid type

* refactor: remove main.go

* refactor: lint fix

* refactor: rename to rql

* refactor: make data type key private

* refactor: remove custom util function

* refactor: fix tests

* fix golangci.yml

* refactor: create constants

* refactor: create constants

---------

Co-authored-by: Punit Kulal <[email protected]>
whoAbhishekSah and punit-kulal authored Feb 17, 2025
1 parent c27d064 commit 78904a0
Showing 4 changed files with 425 additions and 1 deletion.
3 changes: 2 additions & 1 deletion .golangci.yml
@@ -1,5 +1,6 @@
 output:
-  formats: line-number
+  formats:
+    - format: line-number
 linters:
   enable-all: false
   disable-all: true
128 changes: 128 additions & 0 deletions rql/README.md
@@ -0,0 +1,128 @@
# RQL (Rest Query Language)

A library to parse advanced REST API query parameters (filter, pagination, sort, group, search) with comparison operators on the keys (eq, neq, like, gt, lt, etc.).

It takes a Golang struct and a JSON string as input and returns a Golang object that can be used to prepare SQL statements (using raw SQL or ORM query builders).

### Usage

The frontend should send the parameters and operators in the following schema to a backend route that accepts the `POST` HTTP method:

```json
{
  "filters": [
    { "name": "id", "operator": "neq", "value": 20 },
    { "name": "title", "operator": "neq", "value": "nasa" },
    { "name": "enabled", "operator": "eq", "value": false },
    {
      "name": "created_at",
      "operator": "gte",
      "value": "2025-02-05T11:25:37.957Z"
    },
    { "name": "title", "operator": "like", "value": "xyz" }
  ],
  "group_by": ["billing_plan_name"],
  "offset": 20,
  "limit": 50,
  "search": "abcd",
  "sort": [
    { "key": "title", "order": "desc" },
    { "key": "created_at", "order": "asc" }
  ]
}
```
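
As a rough illustration, a handler like the sketch below could accept this payload. The route name and the `Organization` model are assumptions for the example (not part of the library), and it assumes `net/http`, `encoding/json`, and the `rql` package are imported; validation is described in the sections that follow.

```go
// Hypothetical handler sketch: the route name and model are illustrative only.
http.HandleFunc("/organizations/search", func(w http.ResponseWriter, r *http.Request) {
    q := &rql.Query{}
    if err := json.NewDecoder(r.Body).Decode(q); err != nil {
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }
    if err := rql.ValidateQuery(q, Organization{}); err != nil {
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }
    // ...use q to build and run the SQL query (see the goqu example below)...
})
```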

The `rql` library parses this JSON, validates it, and returns a struct containing all the information needed to generate the operators and values for SQL.

The validation happens via struct tags defined on your model. Example:

```golang
type Organization struct {
    Id              int       `rql:"type=number,min=10,max=200"`
    BillingPlanName string    `rql:"type=string"`
    CreatedAt       time.Time `rql:"type=datetime"`
    MemberCount     int       `rql:"type=number"`
    Title           string    `rql:"type=string"`
    Enabled         bool      `rql:"type=bool"`
}

```

**Supported data types:**

1. number
2. string
3. datetime
4. bool

Check the examples below for more info on usage.
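
As a quick illustration of the type checks, the sketch below (assuming the package is imported as `rql` and the `Organization` model above is in scope) shows a filter whose value does not match its declared tag type being rejected:

```go
raw := []byte(`{"filters": [{"name": "id", "operator": "eq", "value": "abc"}]}`)

q := &rql.Query{}
if err := json.Unmarshal(raw, q); err != nil {
    panic(err)
}

// Id is tagged `rql:"type=number,..."`, so a string value fails validation.
if err := rql.ValidateQuery(q, Organization{}); err != nil {
    fmt.Println(err) // e.g. "value abc for key 'id' is not a number type"
}
```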

Using this struct, a SQL query can be generated. Here is an example using the `goqu` SQL builder:

```go
// init the library's Query object with the input JSON bytes
userInput := &rql.Query{}

// assuming jsonBytes is defined earlier
err := json.Unmarshal(jsonBytes, userInput)
if err != nil {
    panic(fmt.Sprintf("failed to unmarshal query string to rql query struct, err: %s", err.Error()))
}

// validate the JSON input against the model's rql struct tags
err = rql.ValidateQuery(userInput, Organization{})
if err != nil {
    panic(err)
}

// the userInput object can now be used to prepare the SQL statement
query := goqu.From("organizations")

fuzzySearchColumns := []string{"id", "billing_plan_name", "title"}

for _, filterItem := range userInput.Filters {
    query = query.Where(goqu.Ex{
        filterItem.Name: goqu.Op{filterItem.Operator: filterItem.Value},
    })
}

listOfExpressions := make([]goqu.Expression, 0)

if userInput.Search != "" {
    for _, col := range fuzzySearchColumns {
        listOfExpressions = append(listOfExpressions, goqu.Ex{
            col: goqu.Op{"LIKE": userInput.Search},
        })
    }
}

query = query.Where(goqu.Or(listOfExpressions...))

query = query.Offset(uint(userInput.Offset))
for _, sortItem := range userInput.Sort {
    switch sortItem.Order {
    case "asc":
        query = query.OrderAppend(goqu.C(sortItem.Key).Asc())
    case "desc":
        query = query.OrderAppend(goqu.C(sortItem.Key).Desc())
    default:
    }
}
query = query.Limit(uint(userInput.Limit))

sql, _, _ := query.ToSQL()
fmt.Println(sql)
```

which gives the following output:

```sql
SELECT * FROM "organizations" WHERE (("id" != 20) AND ("title" != 'nasa') AND ("enabled" IS FALSE) AND ("created_at" >= '2025-02-05T11:25:37.957Z') AND ("title" LIKE 'xyz') AND (("id" LIKE 'abcd') OR ("billing_plan_name" LIKE 'abcd') OR ("title" LIKE 'abcd'))) ORDER BY "title" DESC, "created_at" ASC LIMIT 50 OFFSET 20
```

### Improvements

1. The operators need to be mapped to SQL operators (e.g. `eq` should be converted to `=`). Right now we rely on goqu to do that, but we could make the library SQL/ORM agnostic.

2. Support validation on the range of values, e.g. `min` and `max` on numbers.
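
For the first point, one possible direction is a small operator-to-SQL mapping like the sketch below; `toSQLOperator` is a hypothetical helper, not part of the library:

```go
// toSQLOperator is a hypothetical helper sketching how rql operators could be
// mapped to plain SQL operators without depending on goqu.
func toSQLOperator(op string) (string, error) {
    mapping := map[string]string{
        "eq":   "=",
        "neq":  "!=",
        "gt":   ">",
        "gte":  ">=",
        "lt":   "<",
        "lte":  "<=",
        "like": "LIKE",
    }
    sqlOp, ok := mapping[op]
    if !ok {
        return "", fmt.Errorf("unsupported operator %q", op)
    }
    return sqlOp, nil
}
```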
207 changes: 207 additions & 0 deletions rql/parser.go
@@ -0,0 +1,207 @@
package rql

import (
    "fmt"
    "reflect"
    "slices"
    "strings"
    "time"
)

var validNumberOperations = []string{"eq", "neq", "gt", "gte", "lte"}
var validStringOperations = []string{"eq", "neq", "like"}
var validBoolOperations = []string{"eq", "neq"}
var validDatetimeOperations = []string{"eq", "neq", "gt", "gte", "lte"}

const TAG = "rql"
const DATATYPE_NUMBER = "number"
const DATATYPE_DATETIME = "datetime"
const DATATYPE_STRING = "string"
const DATATYPE_BOOL = "bool"
const SORT_ORDER_ASC = "asc"
const SORT_ORDER_DESC = "desc"

var validSortOrder = []string{SORT_ORDER_ASC, SORT_ORDER_DESC}

type Query struct {
    Filters []Filter `json:"filters"`
    GroupBy []string `json:"group_by"`
    Offset  int      `json:"offset"`
    Limit   int      `json:"limit"`
    Search  string   `json:"search"`
    Sort    []Sort   `json:"sort"`
}

type Filter struct {
    Name     string `json:"name"`
    Operator string `json:"operator"`
    dataType string
    Value    any `json:"value"`
}

type Sort struct {
    Key   string `json:"key"`
    Order string `json:"order"`
}

func ValidateQuery(q *Query, checkStruct interface{}) error {
    val := reflect.ValueOf(checkStruct)

    // validate filters
    for _, filterItem := range q.Filters {
        // validate filter key name
        filterIdx := searchKeyInsideStruct(filterItem.Name, val)
        if filterIdx < 0 {
            return fmt.Errorf("'%s' is not a valid filter key", filterItem.Name)
        }
        structKeyTag := val.Type().Field(filterIdx).Tag.Get(TAG)

        // validate filter key data type
        allowedDataType := getDataTypeOfField(structKeyTag)
        filterItem.dataType = allowedDataType
        switch allowedDataType {
        case DATATYPE_NUMBER:
            err := validateNumberType(filterItem)
            if err != nil {
                return err
            }
        case DATATYPE_BOOL:
            err := validateBoolType(filterItem)
            if err != nil {
                return err
            }
        case DATATYPE_DATETIME:
            err := validateDatetimeType(filterItem)
            if err != nil {
                return err
            }
        case DATATYPE_STRING:
            err := validateStringType(filterItem)
            if err != nil {
                return err
            }
        default:
            return fmt.Errorf("type '%s' is not recognized", allowedDataType)
        }

        if !isValidOperator(filterItem) {
            return fmt.Errorf("operator '%s' is not valid for key '%s'", filterItem.Operator, filterItem.Name)
        }
    }

    err := validateGroupByKeys(q, val)
    if err != nil {
        return err
    }
    return validateSortKey(q, val)
}

func validateNumberType(filterItem Filter) error {
    // check if the type is any of Golang's numeric types
    // if not, return error
    switch filterItem.Value.(type) {
    case uint8, uint16, uint32, uint64, int8, int16, int32, int64, float32, float64, int, uint:
        return nil
    default:
        return fmt.Errorf("value %v for key '%s' is not a number type", filterItem.Value, filterItem.Name)
    }
}

func validateDatetimeType(filterItem Filter) error {
    // cast the value to datetime
    // if failed, return error
    castedVal, ok := filterItem.Value.(string)
    if !ok {
        return fmt.Errorf("value %v for key '%s' is not a valid ISO datetime string", filterItem.Value, filterItem.Name)
    }
    _, err := time.Parse(time.RFC3339, castedVal)
    if err != nil {
        return fmt.Errorf("value %s for key '%s' is not a valid ISO datetime string", filterItem.Value, filterItem.Name)
    }
    return nil
}

func validateBoolType(filterItem Filter) error {
    // cast the value to bool
    // if failed, return error
    _, ok := filterItem.Value.(bool)
    if !ok {
        return fmt.Errorf("value %v for key '%s' is not bool type", filterItem.Value, filterItem.Name)
    }
    return nil
}

func validateStringType(filterItem Filter) error {
    // cast the value to string
    // if failed, return error
    _, ok := filterItem.Value.(string)
    if !ok {
        return fmt.Errorf("value %v for key '%s' is not a valid string type", filterItem.Value, filterItem.Name)
    }
    return nil
}

func searchKeyInsideStruct(keyName string, val reflect.Value) int {
    for i := 0; i < val.NumField(); i++ {
        if strings.EqualFold(val.Type().Field(i).Name, keyName) {
            return i
        }
    }
    return -1
}

// parse the tag schema, which is of the format
// type=number,min=10,max=200
// to extract the type, else fall back to string
func getDataTypeOfField(tagString string) string {
    res := DATATYPE_STRING
    splitted := strings.Split(tagString, ",")
    for _, item := range splitted {
        kvSplitted := strings.Split(item, "=")
        if len(kvSplitted) == 2 {
            if kvSplitted[0] == "type" {
                return kvSplitted[1]
            }
        }
    }
    // fall back to string if type not found in tag value
    return res
}

func isValidOperator(filterItem Filter) bool {
    switch filterItem.dataType {
    case DATATYPE_NUMBER:
        return slices.Contains(validNumberOperations, filterItem.Operator)
    case DATATYPE_DATETIME:
        return slices.Contains(validDatetimeOperations, filterItem.Operator)
    case DATATYPE_STRING:
        return slices.Contains(validStringOperations, filterItem.Operator)
    case DATATYPE_BOOL:
        return slices.Contains(validBoolOperations, filterItem.Operator)
    default:
        return false
    }
}

func validateSortKey(q *Query, val reflect.Value) error {
    for _, item := range q.Sort {
        filterIdx := searchKeyInsideStruct(item.Key, val)
        if filterIdx < 0 {
            return fmt.Errorf("'%s' is not a valid sort key", item.Key)
        }
        if !slices.Contains(validSortOrder, item.Order) {
            return fmt.Errorf("'%s' is not a valid sort order for key '%s'", item.Order, item.Key)
        }
    }
    return nil
}

func validateGroupByKeys(q *Query, val reflect.Value) error {
    for _, item := range q.GroupBy {
        filterIdx := searchKeyInsideStruct(item, val)
        if filterIdx < 0 {
            return fmt.Errorf("'%s' is not a valid group by key", item)
        }
    }
    return nil
}
