diff --git a/dev.yml b/dev.yml index e3a6b99..9075ee9 100644 --- a/dev.yml +++ b/dev.yml @@ -77,6 +77,8 @@ database: # Fields and table names that you wish to block blacklist: + - ar_internal_metadata + - schema_migrations - secret - password - encrypted diff --git a/prod.yml b/prod.yml index 7310637..a33d5e3 100644 --- a/prod.yml +++ b/prod.yml @@ -67,6 +67,8 @@ database: #posts: "{ account_id: { _eq: $account_id } }" blacklist: + - ar_internal_metadata + - schema_migrations - secret - password - encrypted diff --git a/psql/psql_test.go b/psql/psql_test.go index 693b1b9..4d8bc4b 100644 --- a/psql/psql_test.go +++ b/psql/psql_test.go @@ -1,9 +1,223 @@ package psql import ( + "os" + "strings" "testing" + + "github.com/dosco/super-graph/qcode" ) -func TestCompileToPSQL(t *testing.T) { +const ( + errNotExpected = "Generated SQL did not match what was expected" +) +var ( + qcompile *qcode.Compiler + pcompile *Compiler +) + +func TestMain(m *testing.M) { + fm := qcode.NewFilterMap(map[string]string{ + "users": "{ id: { _eq: $user_id } }", + "posts": "{ account_id: { _eq: $account_id } }", + }) + + bl := qcode.NewBlacklist([]string{ + "secret", + "password", + "token", + }) + + qcompile = qcode.NewCompiler(fm, bl) + + tables := []*DBTable{ + &DBTable{Name: "customers", Type: "table"}, + &DBTable{Name: "users", Type: "table"}, + &DBTable{Name: "products", Type: "table"}, + &DBTable{Name: "purchases", Type: "table"}, + } + + columns := [][]*DBColumn{ + []*DBColumn{ + &DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 2, Name: "full_name", Type: "character varying", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 3, Name: "phone", Type: "character varying", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 4, Name: "email", Type: "character varying", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 5, Name: "encrypted_password", Type: "character varying", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 6, Name: "reset_password_token", Type: "character varying", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 7, Name: "reset_password_sent_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 8, Name: "remember_created_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 9, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 10, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}}, + []*DBColumn{ + &DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 2, Name: "full_name", Type: "character varying", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 3, Name: "phone", Type: "character varying", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: 
[]int(nil)}, + &DBColumn{ID: 4, Name: "avatar", Type: "character varying", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 5, Name: "email", Type: "character varying", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 6, Name: "encrypted_password", Type: "character varying", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 7, Name: "reset_password_token", Type: "character varying", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 8, Name: "reset_password_sent_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 9, Name: "remember_created_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 10, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 11, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}}, + []*DBColumn{ + &DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 2, Name: "name", Type: "character varying", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 3, Name: "description", Type: "text", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 4, Name: "price", Type: "numeric(7,2)", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 5, Name: "user_id", Type: "bigint", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "users", FKeyColID: []int{1}}, + &DBColumn{ID: 6, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 7, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}}, + []*DBColumn{ + &DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 2, Name: "customer_id", Type: "bigint", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "customers", FKeyColID: []int{1}}, + &DBColumn{ID: 3, Name: "product_id", Type: "bigint", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "products", FKeyColID: []int{1}}, + &DBColumn{ID: 4, Name: "sale_type", Type: "character varying", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 5, Name: "quantity", Type: "integer", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 6, Name: "due_date", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}, + &DBColumn{ID: 7, Name: "returned", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, Uniquekey: false, FKeyTable: "", FKeyColID: []int(nil)}}, + } + + schema := initSchema() + 
+ for i, t := range tables { + updateSchema(schema, t, columns[i]) + } + + vars := NewVariables(map[string]string{ + "account_id": "select account_id from users where id = $user_id", + }) + + pcompile = NewCompiler(schema, vars) + + os.Exit(m.Run()) +} + +func compileGQLToPSQL(t *testing.T, gql string) string { + qc, err := qcompile.CompileQuery(gql) + if err != nil { + t.Fatal(err) + } + + var sqlStmt strings.Builder + + if err := pcompile.Compile(&sqlStmt, qc); err != nil { + t.Fatal(err) + } + + return sqlStmt.String() +} + +func TestCompileGQLWithArgs(t *testing.T) { + gql := `query { + products( + # returns only 30 items + limit: 30, + + # starts from item 10, commented out for now + # offset: 10, + + # orders the response items by highest price + order_by: { price: desc }, + + # no duplicate prices returned + distinct: [ price ] + + # only items with an id >= 20 and < 28 are returned + where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { + id + name + price + } + }` + + sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("products" ORDER BY "products_0.ob.price" DESC), '[]') AS "products" FROM (SELECT DISTINCT ON ("price") row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "products", "products_0"."price" AS "products_0.ob.price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") < (28)) AND (("products"."id") >= (20)) ) LIMIT ('30') :: integer) AS "products_0" ORDER BY "products_0.ob.price" DESC LIMIT ('30') :: integer) AS "products_0") AS "done_1337";` + + resSQL := compileGQLToPSQL(t, gql) + + if resSQL != sql { + t.Fatal(errNotExpected) + } +} + +func TestCompileGQLOneToMany(t *testing.T) { + gql := `query { + users { + email + products { + name + price + } + } + }` + + sql := `SELECT json_object_agg('users', users) FROM (SELECT coalesce(json_agg("users"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email", "products_1.join"."products" AS "products") AS "sel_0")) AS "users" FROM (SELECT "users"."email", "users"."id" FROM "users" WHERE ((("users"."id") = ('{{user_id}}')) ) LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("products"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price") AS "sel_1")) AS "products" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ( (("products"."user_id") = ("users_0"."id")) ) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "products_1") AS "products_1.join" ON ('true') LIMIT ('20') :: integer) AS "users_0") AS "done_1337";` + + resSQL := compileGQLToPSQL(t, gql) + + if resSQL != sql { + t.Fatal(errNotExpected) + } +} + +func TestCompileGQLBelongTo(t *testing.T) { + gql := `query { + products { + name + price + users { + email + } + } + }` + + sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("products"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "users_1.join"."users" AS "users") AS "sel_0")) AS "products" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("users"), '[]') AS "users" FROM 
(SELECT row_to_json((SELECT "sel_1" FROM (SELECT "users_1"."email" AS "email") AS "sel_1")) AS "users" FROM (SELECT "users"."email" FROM "users" WHERE ( (("users"."id") = ("products_0"."user_id")) ) LIMIT ('20') :: integer) AS "users_1" LIMIT ('20') :: integer) AS "users_1") AS "users_1.join" ON ('true') LIMIT ('20') :: integer) AS "products_0") AS "done_1337";` + + resSQL := compileGQLToPSQL(t, gql) + + if resSQL != sql { + t.Fatal(errNotExpected) + } +} + +func TestCompileGQLManyToMany(t *testing.T) { + gql := `query { + products { + name + customers { + email + full_name + } + } + }` + + sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("products"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "customers_1.join"."customers" AS "customers") AS "sel_0")) AS "products" FROM (SELECT "products"."name", "products"."id" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("customers"), '[]') AS "customers" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name") AS "sel_1")) AS "customers" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ( (("customers"."id") = ("purchases"."customer_id")) ) LIMIT ('20') :: integer) AS "customers_1" LIMIT ('20') :: integer) AS "customers_1") AS "customers_1.join" ON ('true') LIMIT ('20') :: integer) AS "products_0") AS "done_1337";` + + resSQL := compileGQLToPSQL(t, gql) + + if resSQL != sql { + t.Fatal(errNotExpected) + } +} + +func TestCompileGQLManyToManyReverse(t *testing.T) { + gql := `query { + customers { + email + full_name + products { + name + } + } + }` + + sql := `SELECT json_object_agg('customers', customers) FROM (SELECT coalesce(json_agg("customers"), '[]') AS "customers" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "products_1.join"."products" AS "products") AS "sel_0")) AS "customers" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("products"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name") AS "sel_1")) AS "products" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ( (("products"."id") = ("purchases"."product_id")) ) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "products_1") AS "products_1.join" ON ('true') LIMIT ('20') :: integer) AS "customers_0") AS "done_1337";` + + resSQL := compileGQLToPSQL(t, gql) + + if resSQL != sql { + t.Fatal(errNotExpected) + } } diff --git a/psql/tables.go b/psql/tables.go index 9e51fed..57eaa15 100644 --- a/psql/tables.go +++ b/psql/tables.go @@ -40,12 +40,7 @@ type DBRel struct { } func NewDBSchema(db *pg.DB) (*DBSchema, error) { - schema := &DBSchema{ - ColMap: make(map[TCKey]*DBColumn), - ColIDMap: make(map[int]*DBColumn), - PCols: make(map[string]*DBColumn), - RelMap: make(map[TTKey]*DBRel), - } + schema := initSchema() tables, err := GetTables(db) if err != nil { @@ -57,86 +52,100 @@ func NewDBSchema(db *pg.DB) (*DBSchema, error) { if err != nil { return nil, err } - // Current table - ct := 
strings.ToLower(t.Name) - // Foreign key columns in current table - var jcols []*DBColumn - - for _, c := range cols { - schema.ColMap[TCKey{ct, strings.ToLower(c.Name)}] = c - schema.ColIDMap[c.ID] = c - } - - for _, c := range cols { - switch { - case c.PrimaryKey: - schema.PCols[ct] = c - - case len(c.FKeyTable) != 0: - if len(c.FKeyColID) == 0 { - continue - } - - // Foreign key column name - ft := strings.ToLower(c.FKeyTable) - fc, ok := schema.ColIDMap[c.FKeyColID[0]] - if !ok { - continue - } - - // Belongs-to relation between current table and the - // table in the foreign key - rel1 := &DBRel{RelBelongTo, "", "", c.Name, fc.Name} - schema.RelMap[TTKey{ct, ft}] = rel1 - - // One-to-many relation between the foreign key table and the - // the current table - rel2 := &DBRel{RelOneToMany, "", "", fc.Name, c.Name} - schema.RelMap[TTKey{ft, ct}] = rel2 - - jcols = append(jcols, c) - } - } - - // If table contains multiple foreign key columns it's a possible - // join table for many-to-many relationships or multiple one-to-many - // relations - - // Below one-to-many relations use the current table as the - // join table aka through table. - if len(jcols) > 1 { - col1, col2 := jcols[0], jcols[1] - - t1 := strings.ToLower(col1.FKeyTable) - t2 := strings.ToLower(col2.FKeyTable) - - fc1, ok := schema.ColIDMap[col1.FKeyColID[0]] - if !ok { - continue - } - fc2, ok := schema.ColIDMap[col2.FKeyColID[0]] - if !ok { - continue - } - - // One-to-many-through relation between 1nd foreign key table and the - // 2nd foreign key table - //rel1 := &DBRel{RelOneToManyThrough, ct, fc1.Name, col1.Name} - rel1 := &DBRel{RelOneToManyThrough, ct, col2.Name, fc2.Name, col1.Name} - schema.RelMap[TTKey{t1, t2}] = rel1 - - // One-to-many-through relation between 2nd foreign key table and the - // 1nd foreign key table - //rel2 := &DBRel{RelOneToManyThrough, ct, col2.Name, fc2.Name} - rel2 := &DBRel{RelOneToManyThrough, ct, col1.Name, fc1.Name, col2.Name} - schema.RelMap[TTKey{t2, t1}] = rel2 - } + updateSchema(schema, t, cols) } return schema, nil } +func initSchema() *DBSchema { + return &DBSchema{ + ColMap: make(map[TCKey]*DBColumn), + ColIDMap: make(map[int]*DBColumn), + PCols: make(map[string]*DBColumn), + RelMap: make(map[TTKey]*DBRel), + } +} + +func updateSchema(schema *DBSchema, t *DBTable, cols []*DBColumn) { + // Current table + ct := strings.ToLower(t.Name) + + // Foreign key columns in current table + var jcols []*DBColumn + + for _, c := range cols { + schema.ColMap[TCKey{ct, strings.ToLower(c.Name)}] = c + schema.ColIDMap[c.ID] = c + } + + for _, c := range cols { + switch { + case c.PrimaryKey: + schema.PCols[ct] = c + + case len(c.FKeyTable) != 0: + if len(c.FKeyColID) == 0 { + continue + } + + // Foreign key column name + ft := strings.ToLower(c.FKeyTable) + fc, ok := schema.ColIDMap[c.FKeyColID[0]] + if !ok { + continue + } + + // Belongs-to relation between current table and + // the table in the foreign key + rel1 := &DBRel{RelBelongTo, "", "", c.Name, fc.Name} + schema.RelMap[TTKey{ct, ft}] = rel1 + + // One-to-many relation between the foreign key table and + // the current table + rel2 := &DBRel{RelOneToMany, "", "", fc.Name, c.Name} + schema.RelMap[TTKey{ft, ct}] = rel2 + + jcols = append(jcols, c) + } + } + + // If a table contains multiple foreign key columns it's a possible + // join table for many-to-many relationships or multiple one-to-many + // relations + + // Below one-to-many relations use the current table as the + // join table aka through table. 
+ if len(jcols) > 1 { + col1, col2 := jcols[0], jcols[1] + + t1 := strings.ToLower(col1.FKeyTable) + t2 := strings.ToLower(col2.FKeyTable) + + fc1, ok := schema.ColIDMap[col1.FKeyColID[0]] + if !ok { + return + } + fc2, ok := schema.ColIDMap[col2.FKeyColID[0]] + if !ok { + return + } + + // One-to-many-through relation between the 1st foreign key table and the + // 2nd foreign key table + //rel1 := &DBRel{RelOneToManyThrough, ct, fc1.Name, col1.Name} + rel1 := &DBRel{RelOneToManyThrough, ct, col2.Name, fc2.Name, col1.Name} + schema.RelMap[TTKey{t1, t2}] = rel1 + + // One-to-many-through relation between the 2nd foreign key table and the + // 1st foreign key table + //rel2 := &DBRel{RelOneToManyThrough, ct, col2.Name, fc2.Name} + rel2 := &DBRel{RelOneToManyThrough, ct, col1.Name, fc1.Name, col2.Name} + schema.RelMap[TTKey{t2, t1}] = rel2 + } +} + type DBTable struct { Name string `sql:"name"` Type string `sql:"type"` diff --git a/psql/utils.go b/psql/utils.go new file mode 100644 index 0000000..b52bcc7 --- /dev/null +++ b/psql/utils.go @@ -0,0 +1,13 @@ +package psql + +import "regexp" + +func NewVariables(varlist map[string]string) map[string]string { + re := regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`) + vars := make(map[string]string) + + for k, v := range varlist { + vars[k] = re.ReplaceAllString(v, `{{$1}}`) + } + return vars +} diff --git a/qcode/lex.go b/qcode/lex.go index 7048f7b..79c4651 100644 --- a/qcode/lex.go +++ b/qcode/lex.go @@ -172,7 +172,7 @@ func (l *lexer) accept(valid string) bool { return false } -// accept onsumes a run of runes while they are alpha nums +// acceptAlphaNum consumes a run of runes while they are alpha nums func (l *lexer) acceptAlphaNum() bool { n := 0 for r := l.next(); isAlphaNumeric(r); r = l.next() { @@ -182,6 +182,14 @@ return (n != 0) } +// acceptComment consumes a run of runes until the end of the line +func (l *lexer) acceptComment() { + n := 0 + for r := l.next(); !isEndOfLine(r); r = l.next() { + n++ + } +} + // acceptRun consumes a run of runes from the valid set. 
func (l *lexer) acceptRun(valid string) { for strings.ContainsRune(valid, l.next()) { @@ -231,6 +239,10 @@ func lexRoot(l *lexer) stateFn { l.ignore() case isSpace(r): l.ignore() + case r == '#': + l.ignore() + l.acceptComment() + l.ignore() case r == '@': l.ignore() if l.acceptAlphaNum() { diff --git a/qcode/utils.go b/qcode/utils.go new file mode 100644 index 0000000..065b383 --- /dev/null +++ b/qcode/utils.go @@ -0,0 +1,31 @@ +package qcode + +import ( + "fmt" + "regexp" + "strings" +) + +func NewBlacklist(list []string) *regexp.Regexp { + var bl *regexp.Regexp + + if len(list) != 0 { + re := fmt.Sprintf("(?i)%s", strings.Join(list, "|")) + bl = regexp.MustCompile(re) + } + return bl +} + +func NewFilterMap(filters map[string]string) FilterMap { + fm := make(FilterMap) + + for k, v := range filters { + fil, err := CompileFilter(v) + if err != nil { + panic(err) + } + key := strings.ToLower(k) + fm[key] = fil + } + return fm +} diff --git a/serv/serv.go b/serv/serv.go index 0f58bb2..a816f9f 100644 --- a/serv/serv.go +++ b/serv/serv.go @@ -6,7 +6,6 @@ import ( "fmt" "net/http" "os" - "regexp" "strings" "github.com/dosco/super-graph/psql" @@ -125,23 +124,11 @@ func initDB() { } func initCompilers() { - fv := conf.GetStringMapString("database.filters") - fm := make(qcode.FilterMap) - for k, v := range fv { - fil, err := qcode.CompileFilter(v) - if err != nil { - panic(err) - } - key := strings.ToLower(k) - fm[key] = fil - } + filters := conf.GetStringMapString("database.filters") + blacklist := conf.GetStringSlice("database.blacklist") - bv := conf.GetStringSlice("database.blacklist") - var bl *regexp.Regexp - if len(bv) != 0 { - re := fmt.Sprintf("(?i)%s", strings.Join(bv, "|")) - bl = regexp.MustCompile(re) - } + fm := qcode.NewFilterMap(filters) + bl := qcode.NewBlacklist(blacklist) qcompile = qcode.NewCompiler(fm, bl) schema, err := psql.NewDBSchema(db) @@ -149,13 +136,8 @@ func initCompilers() { logger.Fatal(err) } - re := regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`) - vl := conf.GetStringMapString("database.variables") - vars := make(map[string]string) - - for k, v := range vl { - vars[k] = re.ReplaceAllString(v, `{{$1}}`) - } + varlist := conf.GetStringMapString("database.variables") + vars := psql.NewVariables(varlist) pcompile = psql.NewCompiler(schema, vars) }
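Reviewer note: with the filter-map, blacklist, and variable construction extracted into qcode/utils.go and psql/utils.go, the compiler setup that serv.initCompilers performs can now be reproduced outside the server — which is exactly what the new TestMain fixture in psql_test.go does. A minimal sketch of how the three helpers compose, assuming this branch's package paths and signatures; the filter, blacklist, and variable values below are illustrative, taken from the test fixture rather than from any real config:

	package main

	import (
		"fmt"

		"github.com/dosco/super-graph/psql"
		"github.com/dosco/super-graph/qcode"
	)

	func main() {
		// NewFilterMap compiles each per-table filter expression up front,
		// panicking on an invalid expression (same behavior as the old
		// inline code in serv.initCompilers).
		fm := qcode.NewFilterMap(map[string]string{
			"users": "{ id: { _eq: $user_id } }",
		})

		// NewBlacklist folds the field list into a single case-insensitive
		// regexp; it returns nil when the list is empty.
		bl := qcode.NewBlacklist([]string{"secret", "password", "token"})
		_ = qcode.NewCompiler(fm, bl)

		// NewVariables rewrites each $name placeholder to {{name}} using
		// the regexp moved out of serv.initCompilers.
		vars := psql.NewVariables(map[string]string{
			"account_id": "select account_id from users where id = $user_id",
		})
		fmt.Println(vars["account_id"])
		// Prints: select account_id from users where id = {{user_id}}
	}

The {{user_id}} form is what the generated SQL in the new tests asserts against (see the '{{user_id}}' literal in TestCompileGQLOneToMany). Separately, the lexer change makes '#' start a line comment wherever lexRoot runs, which is why the GraphQL query in TestCompileGQLWithArgs can carry inline annotations without affecting the compiled SQL.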