Compare commits

7 commits:

7557a4c29c
dd4accfdd2
06214a3850
7b5548a2c6
00cfa251a2
9f35f85857
f4f6420a30
@@ -85,12 +85,11 @@ type SuperGraph struct {
    allowList   *allow.List
    encKey      [32]byte
    hashSeed    maphash.Seed
-   queries     map[uint64]query
+   queries     map[uint64]*query
    roles       map[string]*Role
    getRole     *sql.Stmt
    rmap        map[uint64]resolvFn
    abacEnabled bool
    anonExists  bool
    qc          *qcode.Compiler
    pc          *psql.Compiler
    ge          *graphql.Engine
@@ -140,7 +139,7 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph,
        return nil, err
    }

-   if len(conf.SecretKey) != 0 {
+   if conf.SecretKey != "" {
        sk := sha256.Sum256([]byte(conf.SecretKey))
        conf.SecretKey = ""
        sg.encKey = sk
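For orientation, a minimal hedged sketch of how this code path is reached from the public API. The exported constructor core.NewSuperGraph(conf, db) and the pgx driver import are assumptions about this version of the library; only the SecretKey handling (hashed with SHA-256 into encKey, then cleared) comes from the hunk above.

package main

import (
    "database/sql"
    "log"

    "github.com/dosco/super-graph/core"
    _ "github.com/jackc/pgx/v4/stdlib" // assumed Postgres driver
)

func main() {
    // Connection string is illustrative only.
    db, err := sql.Open("pgx", "postgres://localhost:5432/example_db")
    if err != nil {
        log.Fatal(err)
    }

    // SecretKey is hashed with SHA-256 into the 32-byte encKey and then
    // blanked out on the config, as the hunk above shows.
    conf := &core.Config{SecretKey: "example-secret"}

    sg, err := core.NewSuperGraph(conf, db)
    if err != nil {
        log.Fatal(err)
    }
    _ = sg
}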
@@ -1,41 +0,0 @@
INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core
BenchmarkGraphQL-16 INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
105048 10398 ns/op 18342 B/op 55 allocs/op
PASS
ok github.com/dosco/super-graph/core 1.328s
PASS
ok github.com/dosco/super-graph/core/internal/allow 0.088s
? github.com/dosco/super-graph/core/internal/crypto [no test files]
? github.com/dosco/super-graph/core/internal/integration_tests [no test files]
PASS
ok github.com/dosco/super-graph/core/internal/integration_tests/cockroachdb 0.121s
PASS
ok github.com/dosco/super-graph/core/internal/integration_tests/postgresql 0.118s
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/psql
BenchmarkCompile-16 79845 14428 ns/op 4584 B/op 39 allocs/op
BenchmarkCompileParallel-16 326205 3918 ns/op 4633 B/op 39 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/psql 2.696s
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16 146953 8049 ns/op 3756 B/op 28 allocs/op
BenchmarkQCompileP-16 475936 2447 ns/op 3790 B/op 28 allocs/op
BenchmarkParse-16 140811 8163 ns/op 3902 B/op 18 allocs/op
BenchmarkParseP-16 571345 2041 ns/op 3903 B/op 18 allocs/op
BenchmarkSchemaParse-16 230715 5012 ns/op 3968 B/op 57 allocs/op
BenchmarkSchemaParseP-16 802426 1565 ns/op 3968 B/op 57 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/qcode 8.427s
? github.com/dosco/super-graph/core/internal/util [no test files]
@@ -82,7 +82,7 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
        }
    }

-   if len(sg.conf.RolesQuery) == 0 {
+   if sg.conf.RolesQuery == "" {
        return nil, errors.New("roles_query not defined")
    }

@@ -133,7 +133,7 @@ func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, e
    io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)

    for _, s := range stmts {
-       if len(s.role.Match) == 0 &&
+       if s.role.Match == "" &&
            s.role.Name != "user" && s.role.Name != "anon" {
            continue
        }
@@ -150,7 +150,7 @@ func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, e

    io.WriteString(w, `(SELECT (CASE`)
    for _, s := range stmts {
-       if len(s.role.Match) == 0 {
+       if s.role.Match == "" {
            continue
        }
        io.WriteString(w, ` WHEN `)
@@ -72,6 +72,7 @@ type Config struct {
type Table struct {
    Name      string
    Table     string
+   Type      string
    Blocklist []string
    Remotes   []Remote
    Columns   []Column
@@ -151,7 +152,7 @@ type Delete struct {

// AddRoleTable function is a helper function to make it easy to add per-table
// row-level config
-func (c *Config) AddRoleTable(role string, table string, conf interface{}) error {
+func (c *Config) AddRoleTable(role, table string, conf interface{}) error {
    var r *Role

    for i := range c.Roles {
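A hedged usage sketch of the new AddRoleTable signature; the Delete config type is defined in this package (see the hunk header above), but the role name, table name, and the empty config value are illustrative only.

package main

import (
    "log"

    "github.com/dosco/super-graph/core"
)

func main() {
    var conf core.Config

    // Attempt to attach a (deliberately empty) Delete config to the
    // "products" table for the "anon" role.
    if err := conf.AddRoleTable("anon", "products", core.Delete{}); err != nil {
        log.Println(err)
    }
}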
@@ -172,14 +172,15 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {

    h := maphash.Hash{}
    h.SetSeed(c.sg.hashSeed)
+   id := queryID(&h, c.res.name, role)

-   q, ok := c.sg.queries[queryID(&h, c.res.name, role)]
+   q, ok := c.sg.queries[id]
    if !ok {
        return nil, nil, errNotFound
    }

    if q.sd == nil {
-       q.Do(func() { c.sg.prepare(&q, role) })
+       q.Do(func() { c.sg.prepare(q, role) })

        if q.err != nil {
            return nil, nil, err
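The hunk above stores *query values and prepares statements lazily through q.Do; a self-contained sketch of that pattern, assuming the query type embeds sync.Once and carries sd/err fields as the field names in the diff suggest (everything else here is illustrative):

package sketch

import (
    "database/sql"
    "sync"
)

// query mirrors the shape implied by the diff: a sync.Once guards one-time
// preparation, sd holds the prepared statement, err any preparation error.
type query struct {
    sync.Once
    sd  *sql.Stmt
    err error
}

// prepareOnce prepares the SQL statement at most once, on first use,
// instead of preparing every known query up front.
func (q *query) prepareOnce(db *sql.DB, sqlText string) (*sql.Stmt, error) {
    q.Do(func() { q.sd, q.err = db.Prepare(sqlText) })
    return q.sd, q.err
}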
@@ -304,7 +305,7 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
        err = row.Scan(&root)
    }

-   if len(role) == 0 {
+   if role == "" {
        c.res.role = c.role
    } else {
        c.res.role = role
core/init.go (98 changed lines)
@@ -21,7 +21,7 @@ func (sg *SuperGraph) initConfig() error {

    for i := 0; i < len(c.Tables); i++ {
        t := &c.Tables[i]
-       t.Name = flect.Pluralize(strings.ToLower(t.Name))
+       // t.Name = flect.Pluralize(strings.ToLower(t.Name))

        if _, ok := tm[t.Name]; ok {
            sg.conf.Tables = append(c.Tables[:i], c.Tables[i+1:]...)
@@ -100,21 +100,26 @@ func getDBTableAliases(c *Config) map[string][]string {
    for i := range c.Tables {
        t := c.Tables[i]

-       if len(t.Table) == 0 || len(t.Columns) != 0 {
-           continue
+       if t.Table != "" && t.Type == "" {
+           m[t.Table] = append(m[t.Table], t.Name)
        }

-       m[t.Table] = append(m[t.Table], t.Name)
    }
    return m
}

func addTables(c *Config, di *psql.DBInfo) error {
+   var err error

    for _, t := range c.Tables {
-       if t.Table == "" || len(t.Columns) == 0 {
-           continue
+       switch t.Type {
+       case "json", "jsonb":
+           err = addJsonTable(di, t.Columns, t)
+
+       case "polymorphic":
+           err = addVirtualTable(di, t.Columns, t)
+       }
-       if err := addTable(di, t.Columns, t); err != nil {

+       if err != nil {
            return err
        }

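A hedged illustration of the alias behaviour the rewritten getDBTableAliases keys on: an entry with Table set and Type empty makes Name an alias for the underlying database table. The names are the usual me/users example and are not taken from this diff.

package sketch

import "github.com/dosco/super-graph/core"

// aliasExample: Table set and Type empty, so "me" becomes an alias that
// getDBTableAliases maps onto the real "users" table.
func aliasExample(conf *core.Config) {
    conf.Tables = append(conf.Tables, core.Table{
        Name:  "me",
        Table: "users",
    })
}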
@@ -122,17 +127,18 @@ func addTables(c *Config, di *psql.DBInfo) error {
    return nil
}

-func addTable(di *psql.DBInfo, cols []Column, t Table) error {
+func addJsonTable(di *psql.DBInfo, cols []Column, t Table) error {
+   // This is for jsonb columns that want to be tables.
    bc, ok := di.GetColumn(t.Table, t.Name)
    if !ok {
        return fmt.Errorf(
-           "Column '%s' not found on table '%s'",
+           "json table: column '%s' not found on table '%s'",
            t.Name, t.Table)
    }

    if bc.Type != "json" && bc.Type != "jsonb" {
        return fmt.Errorf(
-           "Column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
+           "json table: column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
            t.Name, t.Table, bc.Type)
    }

@@ -159,8 +165,38 @@ func addTable(di *psql.DBInfo, cols []Column, t Table) error {
    return nil
}

+func addVirtualTable(di *psql.DBInfo, cols []Column, t Table) error {
+   if len(cols) == 0 {
+       return fmt.Errorf("polymorphic table: no id column specified")
+   }
+
+   c := cols[0]
+
+   if c.ForeignKey == "" {
+       return fmt.Errorf("polymorphic table: no 'related_to' specified on id column")
+   }
+
+   s := strings.SplitN(c.ForeignKey, ".", 2)
+
+   if len(s) != 2 {
+       return fmt.Errorf("polymorphic table: foreign key must be <type column>.<foreign key column>")
+   }
+
+   di.VTables = append(di.VTables, psql.VirtualTable{
+       Name:       t.Name,
+       IDColumn:   c.Name,
+       TypeColumn: s[0],
+       FKeyColumn: s[1],
+   })
+
+   return nil
+}
+
func addForeignKeys(c *Config, di *psql.DBInfo) error {
    for _, t := range c.Tables {
+       if t.Type != "" {
+           continue
+       }
        for _, c := range t.Columns {
            if c.ForeignKey == "" {
                continue
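A hedged sketch of the config shape addVirtualTable expects: the first column is the id column and its ForeignKey splits on "." into the type column and the foreign-key column. All table and column names below are invented for illustration.

package sketch

import "github.com/dosco/super-graph/core"

// polymorphicExample: parsed by addVirtualTable into
// psql.VirtualTable{Name: "subject", IDColumn: "subject_id",
// TypeColumn: "subject_type", FKeyColumn: "id"}.
func polymorphicExample(conf *core.Config) {
    conf.Tables = append(conf.Tables, core.Table{
        Name:  "subject",
        Table: "comments", // for polymorphic tables the foreign-key lookup uses Table, not Name (see addForeignKey below)
        Type:  "polymorphic",
        Columns: []core.Column{
            {Name: "subject_id", ForeignKey: "subject_type.id"},
        },
    })
}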
@@ -174,30 +210,52 @@ func addForeignKeys(c *Config, di *psql.DBInfo) error {
}

func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
-   c1, ok := di.GetColumn(t.Name, c.Name)
+   var tn string
+
+   if t.Type == "polymorphic" {
+       tn = t.Table
+   } else {
+       tn = t.Name
+   }
+
+   c1, ok := di.GetColumn(tn, c.Name)
    if !ok {
        return fmt.Errorf(
-           "Invalid table '%s' or column '%s' in Config",
-           t.Name, c.Name)
+           "config: invalid table '%s' or column '%s' defined",
+           tn, c.Name)
    }

    v := strings.SplitN(c.ForeignKey, ".", 2)
    if len(v) != 2 {
        return fmt.Errorf(
-           "Invalid foreign_key in Config for table '%s' and column '%s",
-           t.Name, c.Name)
+           "config: invalid foreign_key defined for table '%s' and column '%s': %s",
+           tn, c.Name, c.ForeignKey)
    }

+   // check if it's a polymorphic foreign key
+   if _, ok := di.GetColumn(tn, v[0]); ok {
+       c2, ok := di.GetColumn(tn, v[1])
+       if !ok {
+           return fmt.Errorf(
+               "config: invalid column '%s' for polymorphic relationship on table '%s' and column '%s'",
+               v[1], tn, c.Name)
+       }
+
+       c1.FKeyTable = v[0]
+       c1.FKeyColID = []int16{c2.ID}
+       return nil
+   }
+
    fkt, fkc := v[0], v[1]
-   c2, ok := di.GetColumn(fkt, fkc)
+   c3, ok := di.GetColumn(fkt, fkc)
    if !ok {
        return fmt.Errorf(
-           "Invalid foreign_key in Config for table '%s' and column '%s",
-           t.Name, c.Name)
+           "config: foreign_key for table '%s' and column '%s' points to unknown table '%s' and column '%s'",
+           t.Name, c.Name, v[0], v[1])
    }

    c1.FKeyTable = fkt
-   c1.FKeyColID = []int16{c2.ID}
+   c1.FKeyColID = []int16{c3.ID}

    return nil
}
@@ -146,7 +146,7 @@ func (al *List) Load() ([]Item, error) {
    return parse(string(b), al.filepath)
}

-func parse(b string, filename string) ([]Item, error) {
+func parse(b, filename string) ([]Item, error) {
    var items []Item

    var s scanner.Scanner
@@ -299,9 +299,13 @@ func (al *List) save(item Item) error {

    for _, v := range list {
        if v.Comment != "" {
-           f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Comment))
+           _, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Comment))
        } else {
-           f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Name))
+           _, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Name))
        }
+
+       if err != nil {
+           return err
+       }

        if v.Vars != "" {
@@ -320,18 +324,6 @@ func (al *List) save(item Item) error {
    return nil
}

-func matchPrefix(b []byte, i int, s string) bool {
-   if (len(b) - i) < len(s) {
-       return false
-   }
-   for n := 0; n < len(s); n++ {
-       if b[(i+n)] != s[n] {
-           return false
-       }
-   }
-   return true
-}
-
func QueryName(b string) string {
    state, s := 0, 0

@@ -14,7 +14,7 @@ func TestGQLName1(t *testing.T) {

    name := QueryName(q)

-   if len(name) != 0 {
+   if name != "" {
        t.Fatal("Name should be empty, not ", name)
    }
}
@@ -156,27 +156,29 @@ func (co *Compiler) compileQueryWithMetadata(
        if id < closeBlock {
            sel := &c.s[id]

-           if len(sel.Cols) == 0 {
-               continue
-           }

            ti, err := c.schema.GetTable(sel.Name)
            if err != nil {
                return c.md, err
            }

-           if sel.ParentID == -1 {
-               io.WriteString(c.w, `(`)
-           } else {
-               c.renderLateralJoin(sel)
-           }
+           if sel.Type != qcode.STUnion {
+               if len(sel.Cols) == 0 {
+                   continue
+               }

-           if !ti.IsSingular {
-               c.renderPluralSelect(sel, ti)
-           }
+               if sel.ParentID == -1 {
+                   io.WriteString(c.w, `(`)
+               } else {
+                   c.renderLateralJoin(sel)
+               }

-           if err := c.renderSelect(sel, ti, vars); err != nil {
-               return c.md, err
+               if !ti.IsSingular {
+                   c.renderPluralSelect(sel, ti)
+               }
+
+               if err := c.renderSelect(sel, ti, vars); err != nil {
+                   return c.md, err
+               }
            }

            for _, cid := range sel.Children {
@@ -184,10 +186,10 @@ func (co *Compiler) compileQueryWithMetadata(
                continue
            }
            child := &c.s[cid]

            if child.SkipRender {
                continue
            }

            st.Push(child.ID + closeBlock)
            st.Push(child.ID)
        }
@@ -195,35 +197,37 @@ func (co *Compiler) compileQueryWithMetadata(
        } else {
            sel := &c.s[(id - closeBlock)]

            ti, err := c.schema.GetTable(sel.Name)
            if err != nil {
                return c.md, err
            }
            if sel.Type != qcode.STUnion {
                ti, err := c.schema.GetTable(sel.Name)
                if err != nil {
                    return c.md, err
                }

                io.WriteString(c.w, `)`)
                aliasWithID(c.w, "__sr", sel.ID)
                io.WriteString(c.w, `)`)
                aliasWithID(c.w, "__sr", sel.ID)

                io.WriteString(c.w, `)`)
                aliasWithID(c.w, "__sj", sel.ID)

                if !ti.IsSingular {
                    io.WriteString(c.w, `)`)
                    aliasWithID(c.w, "__sj", sel.ID)
                }

                if sel.ParentID == -1 {
                    if st.Len() != 0 {
                        io.WriteString(c.w, `, `)
                if !ti.IsSingular {
                    io.WriteString(c.w, `)`)
                    aliasWithID(c.w, "__sj", sel.ID)
                }

                if sel.ParentID == -1 {
                    if st.Len() != 0 {
                        io.WriteString(c.w, `, `)
                    }
                } else {
                    c.renderLateralJoinClose(sel)
                }
            } else {
                c.renderLateralJoinClose(sel)
            }

            if len(sel.Args) != 0 {
                i := 0
                for _, v := range sel.Args {
                    qcode.FreeNode(v, 500)
                    i++
            if sel.Type != qcode.STMember {
                if len(sel.Args) != 0 {
                    for _, v := range sel.Args {
                        qcode.FreeNode(v)
                    }
                }
            }
        }
@@ -361,6 +365,16 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
            c.md.skipped |= (1 << uint(id))
        }

+   case RelPolymorphic:
+       if _, ok := colmap[rel.Left.Col]; !ok {
+           cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Left.Col})
+           colmap[rel.Left.Col] = struct{}{}
+       }
+       if _, ok := colmap[rel.Right.Table]; !ok {
+           cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Right.Table, FieldName: rel.Right.Table})
+           colmap[rel.Right.Table] = struct{}{}
+       }
+
    default:
        return nil, fmt.Errorf("unknown relationship %s", rel)
    }
@@ -439,15 +453,23 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
    var rel *DBRel
    var err error

+   // Relationships must be between union parents and their parents
    if sel.ParentID != -1 {
-       parent := c.s[sel.ParentID]
+       if sel.Type == qcode.STMember && sel.UParentID != -1 {
+           cn := c.s[sel.ParentID].Name
+           pn := c.s[sel.UParentID].Name
+           rel, err = c.schema.GetRel(cn, pn)

-       rel, err = c.schema.GetRel(ti.Name, parent.Name)
-       if err != nil {
-           return err
+       } else {
+           pn := c.s[sel.ParentID].Name
+           rel, err = c.schema.GetRel(ti.Name, pn)
+       }
+
+       if err != nil {
+           return err
+       }
    }

    childCols, err := c.initSelect(sel, ti, vars)
    if err != nil {
        return err
@@ -531,30 +553,27 @@ func (c *compilerContext) renderJoin(sel *qcode.Select, ti *DBTableInfo) error {
}

func (c *compilerContext) renderJoinByName(table, parent string, id int32) error {
-   rel, err := c.schema.GetRel(table, parent)
-   if err != nil {
-       return err
-   }
+   rel, _ := c.schema.GetRel(table, parent)

    // This join is only required for one-to-many relations since
    // these make use of join tables that need to be pulled in.
-   if rel.Type != RelOneToManyThrough {
-       return err
+   if rel == nil || rel.Type != RelOneToManyThrough {
+       return nil
    }

-   pt, err := c.schema.GetTable(parent)
-   if err != nil {
-       return err
-   }
+   // pt, err := c.schema.GetTable(parent)
+   // if err != nil {
+   //     return err
+   // }

    //fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`,
    //rel.Through, rel.Through, rel.ColT, c.parent.Name, c.parent.ID, rel.Left.Col)
    io.WriteString(c.w, ` LEFT OUTER JOIN "`)
-   io.WriteString(c.w, rel.Through)
+   io.WriteString(c.w, rel.Through.Table)
    io.WriteString(c.w, `" ON ((`)
-   colWithTable(c.w, rel.Through, rel.ColT)
+   colWithTable(c.w, rel.Through.Table, rel.Through.ColL)
    io.WriteString(c.w, `) = (`)
-   colWithTableID(c.w, pt.Name, id, rel.Left.Col)
+   colWithTable(c.w, rel.Left.Table, rel.Left.Col)
    io.WriteString(c.w, `))`)

    return nil
@@ -641,10 +660,33 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
            continue
        }

-       io.WriteString(c.w, `"__sj_`)
-       int32String(c.w, childSel.ID)
-       io.WriteString(c.w, `"."json"`)
-       alias(c.w, childSel.FieldName)
+       if childSel.Type == qcode.STUnion {
+           rel, err := c.schema.GetRel(childSel.Name, ti.Name)
+           if err != nil {
+               return err
+           }
+           io.WriteString(c.w, `(CASE `)
+           for _, uid := range childSel.Children {
+               unionSel := &c.s[uid]
+
+               io.WriteString(c.w, `WHEN `)
+               colWithTableID(c.w, ti.Name, sel.ID, rel.Right.Table)
+               io.WriteString(c.w, ` = `)
+               squoted(c.w, unionSel.Name)
+               io.WriteString(c.w, ` THEN `)
+               io.WriteString(c.w, `"__sj_`)
+               int32String(c.w, unionSel.ID)
+               io.WriteString(c.w, `"."json"`)
+           }
+           io.WriteString(c.w, `END)`)
+           alias(c.w, childSel.FieldName)
+
+       } else {
+           io.WriteString(c.w, `"__sj_`)
+           int32String(c.w, childSel.ID)
+           io.WriteString(c.w, `"."json"`)
+           alias(c.w, childSel.FieldName)
+       }

        if childSel.Paging.Type != qcode.PtOffset {
            io.WriteString(c.w, `, "__sj_`)
@@ -699,7 +741,8 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
    }

    io.WriteString(c.w, ` WHERE (`)
-   if err := c.renderRelationship(sel, ti); err != nil {
+
+   if err := c.renderRelationship(sel, rel); err != nil {
        return err
    }
    if isFil {
@@ -731,7 +774,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
    case ti.IsSingular:
        io.WriteString(c.w, ` LIMIT ('1') :: integer`)

-   case len(sel.Paging.Limit) != 0:
+   case sel.Paging.Limit != "":
        //fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
        io.WriteString(c.w, ` LIMIT ('`)
        io.WriteString(c.w, sel.Paging.Limit)
@@ -744,7 +787,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
        io.WriteString(c.w, ` LIMIT ('20') :: integer`)
    }

-   if len(sel.Paging.Offset) != 0 {
+   if sel.Paging.Offset != "" {
        //fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
        io.WriteString(c.w, ` OFFSET ('`)
        io.WriteString(c.w, sel.Paging.Offset)
@@ -813,22 +856,25 @@ func (c *compilerContext) renderCursorCTE(sel *qcode.Select) error {
    return nil
}

-func (c *compilerContext) renderRelationship(sel *qcode.Select, ti *DBTableInfo) error {
-   parent := c.s[sel.ParentID]
-
-   pti, err := c.schema.GetTable(parent.Name)
-   if err != nil {
-       return err
-   }
-
-   return c.renderRelationshipByName(ti.Name, pti.Name, parent.ID)
-}
-
-func (c *compilerContext) renderRelationshipByName(table, parent string, id int32) error {
+func (c *compilerContext) renderRelationshipByName(table, parent string) error {
    rel, err := c.schema.GetRel(table, parent)
    if err != nil {
        return err
    }
+   return c.renderRelationship(nil, rel)
+}
+
+func (c *compilerContext) renderRelationship(sel *qcode.Select, rel *DBRel) error {
+   var pid int32
+
+   switch {
+   case sel == nil:
+       pid = int32(-1)
+   case sel.Type == qcode.STMember:
+       pid = sel.UParentID
+   default:
+       pid = sel.ParentID
+   }

    io.WriteString(c.w, `((`)

@@ -840,19 +886,19 @@ func (c *compilerContext) renderRelationshipByName(table, parent string, id int3

        switch {
        case !rel.Left.Array && rel.Right.Array:
-           colWithTable(c.w, table, rel.Left.Col)
+           colWithTable(c.w, rel.Left.Table, rel.Left.Col)
            io.WriteString(c.w, `) = any (`)
-           colWithTableID(c.w, parent, id, rel.Right.Col)
+           colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)

        case rel.Left.Array && !rel.Right.Array:
-           colWithTableID(c.w, parent, id, rel.Right.Col)
+           colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
            io.WriteString(c.w, `) = any (`)
-           colWithTable(c.w, table, rel.Left.Col)
+           colWithTable(c.w, rel.Left.Table, rel.Left.Col)

        default:
-           colWithTable(c.w, table, rel.Left.Col)
+           colWithTable(c.w, rel.Left.Table, rel.Left.Col)
            io.WriteString(c.w, `) = (`)
-           colWithTableID(c.w, parent, id, rel.Right.Col)
+           colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
        }

    case RelOneToManyThrough:
@@ -862,25 +908,34 @@ func (c *compilerContext) renderRelationshipByName(table, parent string, id int3

        switch {
        case !rel.Left.Array && rel.Right.Array:
-           colWithTable(c.w, table, rel.Left.Col)
+           colWithTable(c.w, rel.Left.Table, rel.Left.Col)
            io.WriteString(c.w, `) = any (`)
-           colWithTable(c.w, rel.Through, rel.Right.Col)
+           colWithTable(c.w, rel.Through.Table, rel.Through.ColR)

        case rel.Left.Array && !rel.Right.Array:
-           colWithTable(c.w, rel.Through, rel.Right.Col)
+           colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
            io.WriteString(c.w, `) = any (`)
-           colWithTable(c.w, table, rel.Left.Col)
+           colWithTable(c.w, rel.Left.Table, rel.Left.Col)

        default:
-           colWithTable(c.w, table, rel.Left.Col)
+           colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
            io.WriteString(c.w, `) = (`)
-           colWithTable(c.w, rel.Through, rel.Right.Col)
+           colWithTable(c.w, rel.Right.Table, rel.Right.Col)
        }

    case RelEmbedded:
        colWithTable(c.w, rel.Left.Table, rel.Left.Col)
        io.WriteString(c.w, `) = (`)
-       colWithTableID(c.w, parent, id, rel.Left.Col)
+       colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)

+   case RelPolymorphic:
+       colWithTable(c.w, sel.Name, rel.Right.Col)
+       io.WriteString(c.w, `) = (`)
+       colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)
+       io.WriteString(c.w, `) AND (`)
+       colWithTableID(c.w, rel.Left.Table, pid, rel.Right.Table)
+       io.WriteString(c.w, `) = (`)
+       squoted(c.w, sel.Name)
    }

    io.WriteString(c.w, `))`)
@@ -956,11 +1011,8 @@ func (c *compilerContext) renderExp(ex *qcode.Exp, ti *DBTableInfo, skipNested b
                return err
            }

-       } else {
-           //fmt.Fprintf(w, `(("%s"."%s") `, c.sel.Name, val.Col)
-           if err := c.renderOp(val, ti); err != nil {
-               return err
-           }
+       } else if err := c.renderOp(val, ti); err != nil {
+           return err
        }
    }
    //qcode.FreeExp(val)
@@ -993,7 +1045,7 @@ func (c *compilerContext) renderNestedWhere(ex *qcode.Exp, ti *DBTableInfo) erro

        io.WriteString(c.w, ` WHERE `)

-       if err := c.renderRelationshipByName(cti.Name, ti.Name, -1); err != nil {
+       if err := c.renderRelationshipByName(cti.Name, ti.Name); err != nil {
            return err
        }

@@ -1022,7 +1074,7 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
        return nil
    }

-   if len(ex.Col) != 0 {
+   if ex.Col != "" {
        if col, ok = ti.ColMap[ex.Col]; !ok {
            return fmt.Errorf("no column '%s' found ", ex.Col)
        }
@@ -1262,7 +1314,7 @@ func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
    return 0
}

-func hasBit(n uint32, pos uint32) bool {
+func hasBit(n, pos uint32) bool {
    val := n & (1 << pos)
    return (val > 0)
}
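A quick worked check of the bit test above, with illustrative values:

// hasBit(0b1010, 1): 0b1010 & (1 << 1) = 0b0010 > 0, so true.
// hasBit(0b1010, 2): 0b1010 & (1 << 2) = 0b0000, so false.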
@@ -381,6 +381,26 @@ func withFragment3(t *testing.T) {
    compileGQLToPSQL(t, gql, nil, "anon")
}

+// func withInlineFragment(t *testing.T) {
+// 	gql := `
+// 	query {
+// 		users {
+// 			... on users {
+// 				id
+// 				email
+// 			}
+// 			created_at
+// 			... on user {
+// 				first_name
+// 				last_name
+// 			}
+// 		}
+// 	}
+// 	`
+
+// 	compileGQLToPSQL(t, gql, nil, "anon")
+// }
+
func withCursor(t *testing.T) {
    gql := `query {
        Products(
@@ -477,6 +497,7 @@ func TestCompileQuery(t *testing.T) {
    t.Run("withFragment1", withFragment1)
    t.Run("withFragment2", withFragment2)
    t.Run("withFragment3", withFragment3)
+   //t.Run("withInlineFragment", withInlineFragment)
    t.Run("jsonColumnAsTable", jsonColumnAsTable)
    t.Run("withCursor", withCursor)
    t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)
@@ -11,6 +11,7 @@ type DBSchema struct {
    ver int
    t   map[string]*DBTableInfo
    rm  map[string]map[string]*DBRel
+   vt  map[string]*VirtualTable
    fm  map[string]*DBFunction
}

@@ -33,15 +34,19 @@ const (
    RelOneToOne RelType = iota + 1
    RelOneToMany
    RelOneToManyThrough
+   RelPolymorphic
    RelEmbedded
    RelRemote
)

type DBRel struct {
    Type    RelType
-   Through string
-   ColT    string
-   Left    struct {
+   Through struct {
+       Table string
+       ColL  string
+       ColR  string
+   }
+   Left struct {
        col   *DBColumn
        Table string
        Col   string
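A hedged illustration of the reshaped Through sub-struct, populated the way updateSchemaOTMT (further down in this diff) would for the purchases join table that appears throughout the test fixtures; the concrete values are inferred from those fixtures, not taken from the diff, and the snippet is written as if inside the psql package since DBRel's col fields are unexported.

// customers <-(purchases)-> products, joined on purchases.customer_id and
// purchases.product_id.
func examplePurchasesRel() *DBRel {
    rel := &DBRel{Type: RelOneToManyThrough}
    rel.Through.Table = "purchases"
    rel.Through.ColL = "customer_id"
    rel.Through.ColR = "product_id"
    rel.Left.Table = "customers"
    rel.Left.Col = "id"
    rel.Right.Table = "products"
    rel.Right.Col = "id"
    return rel
}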
@@ -60,6 +65,7 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
        ver: info.Version,
        t:   make(map[string]*DBTableInfo),
        rm:  make(map[string]map[string]*DBRel),
+       vt:  make(map[string]*VirtualTable),
        fm:  make(map[string]*DBFunction, len(info.Functions)),
    }

@@ -70,6 +76,10 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
        }
    }

+   if err := schema.virtualRels(info.VTables); err != nil {
+       return nil, err
+   }
+
    for i, t := range info.Tables {
        err := schema.firstDegreeRels(t, info.Columns[i])
        if err != nil {
@@ -102,7 +112,7 @@ func (s *DBSchema) addTable(
    singular := flect.Singularize(t.Key)
    plural := flect.Pluralize(t.Key)

-   s.t[singular] = &DBTableInfo{
+   ts := &DBTableInfo{
        Name:       t.Name,
        Type:       t.Type,
        IsSingular: true,
@@ -112,8 +122,9 @@ func (s *DBSchema) addTable(
        Singular:   singular,
        Plural:     plural,
    }
+   s.t[singular] = ts

-   s.t[plural] = &DBTableInfo{
+   tp := &DBTableInfo{
        Name:       t.Name,
        Type:       t.Type,
        IsSingular: false,
@@ -123,14 +134,15 @@ func (s *DBSchema) addTable(
        Singular:   singular,
        Plural:     plural,
    }
+   s.t[plural] = tp

    if al, ok := aliases[t.Key]; ok {
        for i := range al {
            k1 := flect.Singularize(al[i])
-           s.t[k1] = s.t[singular]
+           s.t[k1] = ts

            k2 := flect.Pluralize(al[i])
-           s.t[k2] = s.t[plural]
+           s.t[k2] = tp
        }
    }

@@ -154,6 +166,54 @@ func (s *DBSchema) addTable(
    return nil
}

+func (s *DBSchema) virtualRels(vts []VirtualTable) error {
+   for _, vt := range vts {
+       s.vt[vt.Name] = &vt
+
+       for _, t := range s.t {
+           idCol, ok := t.ColMap[vt.IDColumn]
+           if !ok {
+               continue
+           }
+           if _, ok = t.ColMap[vt.TypeColumn]; !ok {
+               continue
+           }
+
+           nt := DBTable{
+               ID:   -1,
+               Name: vt.Name,
+               Key:  strings.ToLower(vt.Name),
+               Type: "virtual",
+           }
+
+           if err := s.addTable(nt, nil, nil); err != nil {
+               return err
+           }
+
+           rel := &DBRel{Type: RelPolymorphic}
+           rel.Left.col = idCol
+           rel.Left.Table = t.Name
+           rel.Left.Col = idCol.Name
+
+           rcol := DBColumn{
+               Name: vt.FKeyColumn,
+               Key:  strings.ToLower(vt.FKeyColumn),
+               Type: idCol.Type,
+           }
+
+           rel.Right.col = &rcol
+           rel.Right.Table = vt.TypeColumn
+           rel.Right.Col = rcol.Name
+
+           if err := s.SetRel(vt.Name, t.Name, rel); err != nil {
+               return err
+           }
+       }
+   }
+
+   return nil
+}
+
func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
    ct := t.Key
    cti, ok := s.t[ct]
@@ -164,7 +224,7 @@ func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
    for i := range cols {
        c := cols[i]

-       if len(c.FKeyTable) == 0 {
+       if c.FKeyTable == "" {
            continue
        }

@@ -268,7 +328,7 @@ func (s *DBSchema) secondDegreeRels(t DBTable, cols []DBColumn) error {
    for i := range cols {
        c := cols[i]

-       if len(c.FKeyTable) == 0 {
+       if c.FKeyTable == "" {
            continue
        }

@@ -344,16 +404,17 @@ func (s *DBSchema) updateSchemaOTMT(
    // One-to-many-through relation between 1st foreign key table and the
    // 2nd foreign key table
    rel1 := &DBRel{Type: RelOneToManyThrough}
-   rel1.Through = ti.Name
-   rel1.ColT = col2.Name
+   rel1.Through.Table = ti.Name
+   rel1.Through.ColL = col1.Name
+   rel1.Through.ColR = col2.Name

-   rel1.Left.col = &col2
-   rel1.Left.Table = col2.FKeyTable
-   rel1.Left.Col = fc2.Name
+   rel1.Left.col = fc1
+   rel1.Left.Table = col1.FKeyTable
+   rel1.Left.Col = fc1.Name

-   rel1.Right.col = &col1
-   rel1.Right.Table = ti.Name
-   rel1.Right.Col = col1.Name
+   rel1.Right.col = fc2
+   rel1.Right.Table = t2
+   rel1.Right.Col = fc2.Name

    if err := s.SetRel(t1, t2, rel1); err != nil {
        return err
@@ -362,16 +423,17 @@ func (s *DBSchema) updateSchemaOTMT(
    // One-to-many-through relation between 2nd foreign key table and the
    // 1st foreign key table
    rel2 := &DBRel{Type: RelOneToManyThrough}
-   rel2.Through = ti.Name
-   rel2.ColT = col1.Name
+   rel2.Through.Table = ti.Name
+   rel2.Through.ColL = col2.Name
+   rel2.Through.ColR = col1.Name

-   rel1.Left.col = fc1
-   rel2.Left.Table = col1.FKeyTable
-   rel2.Left.Col = fc1.Name
+   rel2.Left.col = fc2
+   rel2.Left.Table = col2.FKeyTable
+   rel2.Left.Col = fc2.Name

-   rel1.Right.col = &col2
-   rel2.Right.Table = ti.Name
-   rel2.Right.Col = col2.Name
+   rel2.Right.col = fc1
+   rel2.Right.Table = t1
+   rel2.Right.Col = fc1.Name

    if err := s.SetRel(t2, t1, rel2); err != nil {
        return err
@@ -14,14 +14,18 @@ func (rt RelType) String() string {
        return "remote"
    case RelEmbedded:
        return "embedded"
+   case RelPolymorphic:
+       return "polymorphic"
    }
    return ""
}

func (re *DBRel) String() string {
    if re.Type == RelOneToManyThrough {
-       return fmt.Sprintf("'%s.%s' --(Through: %s)--> '%s.%s'",
-           re.Left.Table, re.Left.Col, re.Through, re.Right.Table, re.Right.Col)
+       return fmt.Sprintf("'%s.%s' --(%s.%s, %s.%s)--> '%s.%s'",
+           re.Left.Table, re.Left.Col,
+           re.Through.Table, re.Through.ColL, re.Through.Table, re.Through.ColR,
+           re.Right.Table, re.Right.Col)
    }
    return fmt.Sprintf("'%s.%s' --(%s)--> '%s.%s'",
        re.Left.Table, re.Left.Col, re.Type, re.Right.Table, re.Right.Col)
@@ -14,9 +14,17 @@ type DBInfo struct {
    Tables    []DBTable
    Columns   [][]DBColumn
    Functions []DBFunction
+   VTables   []VirtualTable
    colMap    map[string]map[string]*DBColumn
}

+type VirtualTable struct {
+   Name       string
+   IDColumn   string
+   TypeColumn string
+   FKeyColumn string
+}
+
func GetDBInfo(db *sql.DB, schema string) (*DBInfo, error) {
    di := &DBInfo{}
    var version string
@@ -8,8 +8,8 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "produc
=== RUN   TestCompileInsert/simpleInsertWithPresets
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, $2 :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN   TestCompileInsert/nestedInsertManyToMany
WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN   TestCompileInsert/nestedInsertOneToMany
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN   TestCompileInsert/nestedInsertOneToOne
@@ -20,7 +20,7 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN   TestCompileInsert/nestedInsertOneToOneWithConnectArray
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
---- PASS: TestCompileInsert (0.02s)
+--- PASS: TestCompileInsert (0.03s)
    --- PASS: TestCompileInsert/simpleInsert (0.00s)
    --- PASS: TestCompileInsert/singleInsert (0.00s)
    --- PASS: TestCompileInsert/bulkInsert (0.00s)
@@ -67,9 +67,9 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
=== RUN   TestCompileQuery/oneToManyArray
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN   TestCompileQuery/manyToMany
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN   TestCompileQuery/manyToManyReverse
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products"."id")) WHERE ((("purchases"."customer_id") = ("customers"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunction
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunctionBlockedByCol
@ -85,7 +85,7 @@ SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT t
=== RUN TestCompileQuery/withWhereOnRelations
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/multiRoot
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment1
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment2
@ -146,8 +146,8 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" S
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = $2 :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileUpdate (0.02s)
@ -160,4 +160,4 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALU
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
PASS
ok  github.com/dosco/super-graph/core/internal/psql  0.374s
ok  github.com/dosco/super-graph/core/internal/psql  0.323s
@ -121,12 +121,10 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
}
io.WriteString(w, `)`)

} else {
if qc.Selects[0].Where != nil {
io.WriteString(w, `WHERE `)
if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
return err
}
} else if qc.Selects[0].Where != nil {
io.WriteString(w, `WHERE `)
if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
return err
}
}
13 core/internal/qcode/bench.11 Normal file
@ -0,0 +1,13 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16           118282    9686 ns/op    4031 B/op    30 allocs/op
BenchmarkQCompileP-16          427531    2710 ns/op    4077 B/op    30 allocs/op
BenchmarkQCompileFragment-16   140588    8328 ns/op    8903 B/op    13 allocs/op
BenchmarkParse-16              131396    9212 ns/op    4175 B/op    18 allocs/op
BenchmarkParseP-16             503778    2310 ns/op    4176 B/op    18 allocs/op
BenchmarkParseFragment-16      143725    8158 ns/op   10193 B/op     9 allocs/op
BenchmarkSchemaParse-16        240609    5060 ns/op    3968 B/op    57 allocs/op
BenchmarkSchemaParseP-16       785116    1534 ns/op    3968 B/op    57 allocs/op
PASS
ok  github.com/dosco/super-graph/core/internal/qcode  11.092s
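The "-P" entries above are the parallel variants of the same compile and parse paths. As a minimal sketch of how such a parallel benchmark is usually structured (assumed, not copied from the repository; qcode.NewCompiler and the Compile signature are taken from the diff further down), b.RunParallel spreads the same call across GOMAXPROCS goroutines:

// Sketch only: assumes qcode.NewCompiler(qcode.Config{}) and
// (*Compiler).Compile(query []byte, role string) as shown in this diff.
package qcode_test

import (
	"testing"

	"github.com/dosco/super-graph/core/internal/qcode"
)

var benchGQL = []byte(`query { products { id name user { email } } }`)

func BenchmarkQCompileP(b *testing.B) {
	qc, err := qcode.NewCompiler(qcode.Config{})
	if err != nil {
		b.Fatal(err)
	}

	b.ResetTimer()
	b.ReportAllocs()

	// RunParallel is what turns BenchmarkQCompile into the "-P" variant:
	// the compile call is issued concurrently from several goroutines.
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			if _, err := qc.Compile(benchGQL, "user"); err != nil {
				b.Error(err)
				return
			}
		}
	})
}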
@ -141,8 +141,7 @@ func (l *lexer) current() (Pos, Pos) {
func (l *lexer) emit(t itemType) {
l.items = append(l.items, item{t, l.start, l.pos, l.line})
// Some items contain text internally. If so, count their newlines.
switch t {
case itemStringVal:
if t == itemStringVal {
for i := l.start; i < l.pos; i++ {
if l.input[i] == '\n' {
l.line++
@ -404,15 +403,15 @@ func isAlphaNumeric(r rune) bool {
return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
}

func equals(b []byte, s Pos, e Pos, val []byte) bool {
func equals(b []byte, s, e Pos, val []byte) bool {
return bytes.EqualFold(b[s:e], val)
}

func contains(b []byte, s Pos, e Pos, chars string) bool {
func contains(b []byte, s, e Pos, chars string) bool {
return bytes.ContainsAny(b[s:e], chars)
}

func lowercase(b []byte, s Pos, e Pos) {
func lowercase(b []byte, s, e Pos) {
for i := s; i < e; i++ {
if b[i] >= 'A' && b[i] <= 'Z' {
b[i] = ('a' + (b[i] - 'A'))
@ -1,7 +1,6 @@
package qcode

import (
"encoding/binary"
"errors"
"fmt"
"hash/maphash"
@ -35,7 +34,8 @@ const (
NodeVar
)

type SelectionSet struct {
type Operation struct {
Type parserType
Name string
Args []Arg
argsA [10]Arg
@ -43,11 +43,6 @@ type SelectionSet struct {
fieldsA [10]Field
}

type Operation struct {
Type parserType
SelectionSet
}

var zeroOperation = Operation{}

func (o *Operation) Reset() {
@ -55,9 +50,10 @@ func (o *Operation) Reset() {
}

type Fragment struct {
Name string
On string
SelectionSet
Name string
On string
Fields []Field
fieldsA [10]Field
}

var zeroFragment = Fragment{}
@ -75,11 +71,13 @@ type Field struct {
argsA [5]Arg
Children []int32
childrenA [5]int32
Union bool
}

type Arg struct {
Name string
Val *Node
df bool
}

type Node struct {
@ -158,9 +156,11 @@ func Parse(gql []byte) (*Operation, error) {

if p.peek(itemFragment) {
p.ignore()
if err = p.parseFragment(op); err != nil {
if f, err := p.parseFragment(); err != nil {
fragPool.Put(f)
return nil, err
}

} else {
if !qf && p.peek(itemQuery, itemMutation, itemSub, itemObjOpen) {
s = p.pos
@ -175,40 +175,47 @@ func Parse(gql []byte) (*Operation, error) {
return nil, err
}

for _, v := range p.frags {
fragPool.Put(v)
}

return op, nil
}

func (p *Parser) parseFragment(op *Operation) error {
func (p *Parser) parseFragment() (*Fragment, error) {
var err error

frag := fragPool.Get().(*Fragment)
frag.Reset()

frag.Fields = frag.fieldsA[:0]
frag.Args = frag.argsA[:0]

if p.peek(itemName) {
frag.Name = p.val(p.next())
} else {
return frag, errors.New("fragment: missing name")
}

if p.peek(itemOn) {
p.ignore()
} else {
return errors.New("fragment: missing 'on' keyword")
return frag, errors.New("fragment: missing 'on' keyword")
}

if p.peek(itemName) {
frag.On = p.vall(p.next())
} else {
return errors.New("fragment: missing table name after 'on' keyword")
return frag, errors.New("fragment: missing table name after 'on' keyword")
}

if p.peek(itemObjOpen) {
p.ignore()
} else {
return fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
return frag, fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
}

if err := p.parseSelectionSet(&frag.SelectionSet); err != nil {
return fmt.Errorf("fragment: %v", err)
frag.Fields, err = p.parseFields(frag.Fields)
if err != nil {
return frag, fmt.Errorf("fragment: %v", err)
}

if p.frags == nil {
@ -221,7 +228,7 @@ func (p *Parser) parseFragment(op *Operation) error {

p.frags[k] = frag

return nil
return frag, nil
}

func (p *Parser) parseOp(op *Operation) error {
@ -249,7 +256,7 @@ func (p *Parser) parseOp(op *Operation) error {
break
}

err = p.parseSelectionSet(&op.SelectionSet)
op.Fields, err = p.parseFields(op.Fields)
if err != nil {
return fmt.Errorf("%s: %v", op.Type, err)
}
@ -293,17 +300,6 @@ func (p *Parser) parseOpTypeAndArgs(op *Operation) error {
return nil
}

func (p *Parser) parseSelectionSet(selset *SelectionSet) error {
var err error

selset.Fields, err = p.parseFields(selset.Fields)
if err != nil {
return err
}

return nil
}

func ParseArgValue(argVal string) (*Node, error) {
l := lexPool.Get().(*lexer)
l.Reset()
@ -324,6 +320,7 @@ func ParseArgValue(argVal string) (*Node, error) {
}

func (p *Parser) parseFields(fields []Field) ([]Field, error) {
var err error
st := NewStack()

if !p.peek(itemName, itemSpread) {
@ -358,83 +355,123 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
isFrag = true
}

if !p.peek(itemName) {
if isFrag {
return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
} else {
return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
}
}

var f *Field

if isFrag {
name := p.val(p.next())
p.h.WriteString(name)
k := p.h.Sum64()
p.h.Reset()

fr, ok := p.frags[k]
if !ok {
return nil, fmt.Errorf("no fragment named '%s' defined", name)
}

n := int32(len(fields))
fields = append(fields, fr.Fields...)

for i := 0; i < len(fr.Fields); i++ {
k := (n + int32(i))
f := &fields[k]
f.ID = int32(k)

// If this is the top-level point the parent to the parent of the
// previous field.
if f.ParentID == -1 {
pid := st.Peek()
f.ParentID = pid
if f.ParentID != -1 {
fields[pid].Children = append(fields[f.ParentID].Children, f.ID)
}
// Update all the other parents id's by our new place in this new array
} else {
f.ParentID += n
}

f.Children = make([]int32, len(f.Children))
copy(f.Children, fr.Fields[i].Children)

// Update all the children which is needed.
for j := range f.Children {
f.Children[j] += n
}
}

fields, err = p.parseFragmentFields(st, fields)
} else {
fields = append(fields, Field{ID: int32(len(fields))})
fields, err = p.parseNormalFields(st, fields)
}

f = &fields[(len(fields) - 1)]
f.Args = f.argsA[:0]
f.Children = f.childrenA[:0]
if err != nil {
return nil, err
}
}

// Parse the field
if err := p.parseField(f); err != nil {
return nil, err
}
return fields, nil
}

if st.Len() == 0 {
f.ParentID = -1
} else {
pid := st.Peek()
f.ParentID = pid
fields[pid].Children = append(fields[pid].Children, f.ID)
func (p *Parser) parseNormalFields(st *Stack, fields []Field) ([]Field, error) {
if !p.peek(itemName) {
return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
}

fields = append(fields, Field{ID: int32(len(fields))})

f := &fields[(len(fields) - 1)]
f.Args = f.argsA[:0]
f.Children = f.childrenA[:0]

// Parse the field
if err := p.parseField(f); err != nil {
return nil, err
}

if st.Len() == 0 {
f.ParentID = -1
} else {
pid := st.Peek()
f.ParentID = pid
fields[pid].Children = append(fields[pid].Children, f.ID)
}

// The first opening curley brackets after this
// comes the columns or child fields
if p.peek(itemObjOpen) {
p.ignore()
st.Push(f.ID)
}

return fields, nil
}

func (p *Parser) parseFragmentFields(st *Stack, fields []Field) ([]Field, error) {
var err error
pid := st.Peek()

if p.peek(itemOn) {
p.ignore()
fields[pid].Union = true

if fields, err = p.parseNormalFields(st, fields); err != nil {
return nil, err
}

// If parent is a union selector than copy over args from the parent
// to the first child which is the root selector for each union type.
for i := pid + 1; i < int32(len(fields)); i++ {
f := &fields[i]
if f.ParentID == pid {
f.Args = fields[pid].Args
}
}

// The first opening curley brackets after this
// comes the columns or child fields
if p.peek(itemObjOpen) {
p.ignore()
st.Push(f.ID)
} else {
if !p.peek(itemName) {
return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
}

name := p.val(p.next())
_, _ = p.h.WriteString(name)
id := p.h.Sum64()
p.h.Reset()

fr, ok := p.frags[id]
if !ok {
return nil, fmt.Errorf("no fragment named '%s' defined", name)
}
ff := fr.Fields

n := int32(len(fields))
fields = append(fields, ff...)

for i := 0; i < len(ff); i++ {
k := (n + int32(i))
f := &fields[k]
f.ID = int32(k)

// If this is the top-level point the parent to the parent of the
// previous field.
if f.ParentID == -1 {
f.ParentID = pid
if f.ParentID != -1 {
fields[pid].Children = append(fields[pid].Children, f.ID)
}
// Update all the other parents id's by our new place in this new array
} else {
f.ParentID += n
}

// Copy over children since fields append is not a deep copy
f.Children = make([]int32, len(f.Children))
copy(f.Children, ff[i].Children)

// Copy over args since args append is not a deep copy
f.Args = make([]Arg, len(f.Args))
copy(f.Args, ff[i].Args)

// Update all the children which is needed.
for j := range f.Children {
f.Children[j] += n
}
}
}
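The parseFragmentFields hunk above is mostly index bookkeeping: a fragment's pre-parsed Fields slice is appended onto the operation's fields, every copied field gets a new ID at offset n, its ParentID either points at the selector the spread appeared under or is shifted by n, and its child indices are shifted by n after a copy (append reuses the fragment's backing arrays). A toy sketch of just that remapping, using a stripped-down Field type rather than the package's own:

// Illustrative sketch only; Field here is a stand-in, not qcode's type.
package main

import "fmt"

type Field struct {
	ID       int32
	ParentID int32
	Children []int32
}

// inline appends the fragment's fields into dst under parent pid,
// re-basing IDs, parent IDs and child indices by the old length of dst.
func inline(dst []Field, frag []Field, pid int32) []Field {
	n := int32(len(dst))
	dst = append(dst, frag...)

	for i := range frag {
		f := &dst[n+int32(i)]
		f.ID = n + int32(i)

		if f.ParentID == -1 {
			// Top-level fragment field: hang it off the spread's parent.
			f.ParentID = pid
			if pid != -1 {
				dst[pid].Children = append(dst[pid].Children, f.ID)
			}
		} else {
			// Nested field: its parent moved by n as well.
			f.ParentID += n
		}

		// append did not deep-copy the slice headers, so clone the
		// children before shifting them by n.
		f.Children = append([]int32(nil), frag[i].Children...)
		for j := range f.Children {
			f.Children[j] += n
		}
	}
	return dst
}

func main() {
	fields := []Field{{ID: 0, ParentID: -1}} // e.g. the selector the spread sits under
	frag := []Field{
		{ID: 0, ParentID: -1, Children: []int32{1, 2}}, // fragment root
		{ID: 1, ParentID: 0},
		{ID: 2, ParentID: 0},
	}
	for _, f := range inline(fields, frag, 0) {
		fmt.Printf("id=%d parent=%d children=%v\n", f.ID, f.ParentID, f.Children)
	}
}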
@ -534,10 +571,8 @@ func (p *Parser) parseList() (*Node, error) {
}
if ty == 0 {
ty = node.Type
} else {
if ty != node.Type {
return nil, errors.New("All values in a list must be of the same type")
}
} else if ty != node.Type {
return nil, errors.New("All values in a list must be of the same type")
}
node.Parent = parent
nodes = append(nodes, node)
@ -676,11 +711,6 @@ func (p *Parser) ignore() {
p.pos = n
}

func (p *Parser) peekCurrent() string {
item := p.items[p.pos]
return b2s(p.input[item.pos:item.end])
}

func (p *Parser) peekNext() string {
item := p.items[p.pos+1]
return b2s(p.input[item.pos:item.end])
@ -690,16 +720,6 @@ func (p *Parser) reset(to int) {
p.pos = to
}

func (p *Parser) fHash(name string, parentID int32) uint64 {
var b []byte
binary.LittleEndian.PutUint32(b, uint32(parentID))
p.h.WriteString(name)
p.h.Write(b)
v := p.h.Sum64()
p.h.Reset()
return v
}

func b2s(b []byte) string {
return *(*string)(unsafe.Pointer(&b))
}
@ -736,31 +756,6 @@ func (t parserType) String() string {
return v
}

// type Frees struct {
// n *Node
// loc int
// }

// var freeList []Frees

// func FreeNode(n *Node, loc int) {
// j := -1

// for i := range freeList {
// if n == freeList[i].n {
// j = i
// break
// }
// }

// if j == -1 {
// nodePool.Put(n)
// freeList = append(freeList, Frees{n, loc})
// } else {
// fmt.Printf("(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
// }
// }

func FreeNode(n *Node, loc int) {
func FreeNode(n *Node) {
nodePool.Put(n)
}
|
||||
)
|
||||
|
||||
type QType int
|
||||
type SType int
|
||||
type Action int
|
||||
|
||||
const (
|
||||
@ -19,7 +20,8 @@ const (
|
||||
)
|
||||
|
||||
const (
|
||||
QTQuery QType = iota + 1
|
||||
QTUnknown QType = iota
|
||||
QTQuery
|
||||
QTMutation
|
||||
QTInsert
|
||||
QTUpdate
|
||||
@ -27,6 +29,12 @@ const (
|
||||
QTUpsert
|
||||
)
|
||||
|
||||
const (
|
||||
STNone SType = iota
|
||||
STUnion
|
||||
STMember
|
||||
)
|
||||
|
||||
type QCode struct {
|
||||
Type QType
|
||||
ActionVar string
|
||||
@ -38,6 +46,8 @@ type QCode struct {
|
||||
type Select struct {
|
||||
ID int32
|
||||
ParentID int32
|
||||
UParentID int32
|
||||
Type SType
|
||||
Args map[string]*Node
|
||||
Name string
|
||||
FieldName string
|
||||
@ -277,6 +287,7 @@ func (com *Compiler) Compile(query []byte, role string) (*QCode, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
freeNodes(op)
|
||||
opPool.Put(op)
|
||||
|
||||
return &qc, nil
|
||||
@ -371,7 +382,11 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
})
|
||||
s := &selects[(len(selects) - 1)]
|
||||
|
||||
if len(field.Alias) != 0 {
|
||||
if field.Union {
|
||||
s.Type = STUnion
|
||||
}
|
||||
|
||||
if field.Alias != "" {
|
||||
s.FieldName = field.Alias
|
||||
} else {
|
||||
s.FieldName = s.Name
|
||||
@ -382,6 +397,11 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
} else {
|
||||
p := &selects[s.ParentID]
|
||||
p.Children = append(p.Children, s.ID)
|
||||
|
||||
if p.Type == STUnion {
|
||||
s.Type = STMember
|
||||
s.UParentID = p.ParentID
|
||||
}
|
||||
}
|
||||
|
||||
if skipRender {
|
||||
@ -461,6 +481,7 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
}
|
||||
|
||||
qc.Selects = selects[:id]
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -492,50 +513,42 @@ func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
func (com *Compiler) compileArgs(qc *QCode, sel *Select, args []Arg, role string) error {
var err error

// don't free this arg either previously done or will be free'd
// in the future like in psql
var df bool

for i := range args {
arg := &args[i]

switch arg.Name {
case "id":
err, df = com.compileArgID(sel, arg)
err = com.compileArgID(sel, arg)

case "search":
err, df = com.compileArgSearch(sel, arg)
err = com.compileArgSearch(sel, arg)

case "where":
err, df = com.compileArgWhere(sel, arg, role)
err = com.compileArgWhere(sel, arg, role)

case "orderby", "order_by", "order":
err, df = com.compileArgOrderBy(sel, arg)
err = com.compileArgOrderBy(sel, arg)

case "distinct_on", "distinct":
err, df = com.compileArgDistinctOn(sel, arg)
err = com.compileArgDistinctOn(sel, arg)

case "limit":
err, df = com.compileArgLimit(sel, arg)
err = com.compileArgLimit(sel, arg)

case "offset":
err, df = com.compileArgOffset(sel, arg)
err = com.compileArgOffset(sel, arg)

case "first":
err, df = com.compileArgFirstLast(sel, arg, PtForward)
err = com.compileArgFirstLast(sel, arg, PtForward)

case "last":
err, df = com.compileArgFirstLast(sel, arg, PtBackward)
err = com.compileArgFirstLast(sel, arg, PtBackward)

case "after":
err, df = com.compileArgAfterBefore(sel, arg, PtForward)
err = com.compileArgAfterBefore(sel, arg, PtForward)

case "before":
err, df = com.compileArgAfterBefore(sel, arg, PtBackward)
}

if !df {
FreeNode(arg.Val, 5)
err = com.compileArgAfterBefore(sel, arg, PtBackward)
}

if err != nil {
@ -616,14 +629,12 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
}

// Objects inside a list
if len(node.Name) == 0 {
if node.Name == "" {
pushChildren(st, node.exp, node)
continue

} else {
if _, ok := com.bl[node.Name]; ok {
continue
}
} else if _, ok := com.bl[node.Name]; ok {
continue
}

ex, err := newExp(st, node, usePool)
@ -646,39 +657,20 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
}
}

if usePool {
st.Push(node)

for {
if st.Len() == 0 {
break
}
intf := st.Pop()
node, ok := intf.(*Node)
if !ok || node == nil {
continue
}
for i := range node.Children {
st.Push(node.Children[i])
}
FreeNode(node, 1)
}
}

return root, needsUser, nil
}
func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
if sel.ID != 0 {
return nil, false
return nil
}

if sel.Where != nil && sel.Where.Op == OpEqID {
return nil, false
return nil
}

if arg.Val.Type != NodeVar {
return argErr("id", "variable"), false
return argErr("id", "variable")
}

ex := expPool.Get().(*Exp)
@ -689,12 +681,12 @@ func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
ex.Val = arg.Val.Val

sel.Where = ex
return nil, false
return nil
}

func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error {
if arg.Val.Type != NodeVar {
return argErr("search", "variable"), false
return argErr("search", "variable")
}

ex := expPool.Get().(*Exp)
@ -709,18 +701,19 @@ func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
}

sel.Args[arg.Name] = arg.Val
arg.df = true
AddFilter(sel, ex)

return nil, true
return nil
}

func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error, bool) {
func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) error {
st := util.NewStack()
var err error

ex, nu, err := com.compileArgObj(st, arg)
if err != nil {
return err, false
return err
}

if nu && role == "anon" {
@ -728,12 +721,12 @@ func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error,
}
AddFilter(sel, ex)

return nil, true
return nil
}

func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
if arg.Val.Type != NodeObj {
return fmt.Errorf("expecting an object"), false
return fmt.Errorf("expecting an object")
}

st := util.NewStack()
@ -751,16 +744,15 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
node, ok := intf.(*Node)

if !ok || node == nil {
return fmt.Errorf("17: unexpected value %v (%t)", intf, intf), false
return fmt.Errorf("17: unexpected value %v (%t)", intf, intf)
}

if _, ok := com.bl[node.Name]; ok {
FreeNode(node, 2)
continue
}

if node.Type != NodeStr && node.Type != NodeVar {
return fmt.Errorf("expecting a string or variable"), false
return fmt.Errorf("expecting a string or variable")
}

ob := &OrderBy{}
@ -779,25 +771,24 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
case "desc_nulls_last":
ob.Order = OrderDescNullsLast
default:
return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first"), false
return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first")
}

setOrderByColName(ob, node)
sel.OrderBy = append(sel.OrderBy, ob)
FreeNode(node, 3)
}
return nil, false
return nil
}

func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error {
node := arg.Val

if _, ok := com.bl[node.Name]; ok {
return nil, false
return nil
}

if node.Type != NodeList && node.Type != NodeStr {
return fmt.Errorf("expecting a list of strings or just a string"), false
return fmt.Errorf("expecting a list of strings or just a string")
}

if node.Type == NodeStr {
@ -806,58 +797,57 @@ func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {

for i := range node.Children {
sel.DistinctOn = append(sel.DistinctOn, node.Children[i].Val)
FreeNode(node.Children[i], 5)
}

return nil, false
return nil
}

func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) error {
node := arg.Val

if node.Type != NodeInt {
return argErr("limit", "number"), false
return argErr("limit", "number")
}

sel.Paging.Limit = node.Val

return nil, false
return nil
}

func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) error {
node := arg.Val

if node.Type != NodeVar {
return argErr("offset", "variable"), false
return argErr("offset", "variable")
}

sel.Paging.Offset = node.Val
return nil, false
return nil
}

func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) (error, bool) {
func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) error {
node := arg.Val

if node.Type != NodeInt {
return argErr(arg.Name, "number"), false
return argErr(arg.Name, "number")
}

sel.Paging.Type = pt
sel.Paging.Limit = node.Val

return nil, false
return nil
}

func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) (error, bool) {
func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) error {
node := arg.Val

if node.Type != NodeVar || node.Val != "cursor" {
return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name), false
return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name)
}
sel.Paging.Type = pt
sel.Paging.Cursor = true

return nil, false
return nil
}

// var zeroTrv = &trval{}
@ -1058,7 +1048,7 @@ func setWhereColName(ex *Exp, node *Node) {
if n.Type != NodeObj {
continue
}
if len(n.Name) != 0 {
if n.Name != "" {
k := n.Name
if k == "and" || k == "or" || k == "not" ||
k == "_and" || k == "_or" || k == "_not" {
@ -1237,3 +1227,81 @@ func FreeExp(ex *Exp) {
func argErr(name, ty string) error {
return fmt.Errorf("value for argument '%s' must be a %s", name, ty)
}

func freeNodes(op *Operation) {
var st *util.Stack
fm := make(map[*Node]struct{})

for i := range op.Args {
arg := op.Args[i]
if arg.df {
continue
}

for i := range arg.Val.Children {
if st == nil {
st = util.NewStack()
}
c := arg.Val.Children[i]
if _, ok := fm[c]; !ok {
st.Push(c)
}
}

if _, ok := fm[arg.Val]; !ok {
nodePool.Put(arg.Val)
fm[arg.Val] = struct{}{}
}

}

for i := range op.Fields {
f := op.Fields[i]

for j := range f.Args {
arg := f.Args[j]
if arg.df {
continue
}

for k := range arg.Val.Children {
if st == nil {
st = util.NewStack()
}
c := arg.Val.Children[k]
if _, ok := fm[c]; !ok {
st.Push(c)
}
}

if _, ok := fm[arg.Val]; !ok {
nodePool.Put(arg.Val)
fm[arg.Val] = struct{}{}
}
}
}

if st == nil {
return
}

for {
if st.Len() == 0 {
break
}
intf := st.Pop()
node, ok := intf.(*Node)
if !ok || node == nil {
continue
}

for i := range node.Children {
st.Push(node.Children[i])
}

if _, ok := fm[node]; !ok {
nodePool.Put(node)
fm[node] = struct{}{}
}
}
}
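The new freeNodes pass above replaces the scattered FreeNode(arg.Val, n) calls: args that were handed off elsewhere are marked with the df flag and skipped, every other node reachable from the operation is walked once with an explicit stack, and a map of already-freed pointers guards against putting the same *Node into the sync.Pool twice. A self-contained sketch of that guard pattern (toy Node type, not the package's):

// Sketch of the "put each pointer into the pool at most once" idea.
package main

import (
	"fmt"
	"sync"
)

type Node struct {
	Name     string
	Children []*Node
}

var nodePool = sync.Pool{New: func() interface{} { return new(Node) }}

// freeTree returns every node in the tree to the pool exactly once,
// even if the same *Node is reachable through more than one parent.
func freeTree(root *Node) int {
	seen := make(map[*Node]struct{})
	st := []*Node{root}
	freed := 0

	for len(st) > 0 {
		n := st[len(st)-1]
		st = st[:len(st)-1]
		if n == nil {
			continue
		}
		if _, ok := seen[n]; ok {
			continue // already handed back to the pool
		}
		seen[n] = struct{}{}

		st = append(st, n.Children...)
		nodePool.Put(n)
		freed++
	}
	return freed
}

func main() {
	shared := &Node{Name: "shared"}
	root := &Node{Name: "root", Children: []*Node{shared, {Name: "a", Children: []*Node{shared}}}}
	fmt.Println("freed:", freeTree(root)) // 3, not 4: "shared" is put back only once
}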
@ -29,6 +29,8 @@ func al(b byte) bool {

func (qt QType) String() string {
switch qt {
case QTUnknown:
return "unknown"
case QTQuery:
return "query"
case QTMutation:
@ -64,7 +64,7 @@ func (sg *SuperGraph) initPrepared() error {
return fmt.Errorf("role query: %w", err)
}

sg.queries = make(map[uint64]query)
sg.queries = make(map[uint64]*query)

list, err := sg.allowList.Load()
if err != nil {
@ -75,22 +75,20 @@ func (sg *SuperGraph) initPrepared() error {
h.SetSeed(sg.hashSeed)

for _, v := range list {
if len(v.Query) == 0 {
if v.Query == "" {
continue
}

qt := qcode.GetQType(v.Query)

switch qt {
case qcode.QTQuery:
sg.queries[queryID(&h, v.Name, "user")] = query{ai: v, qt: qt}

if sg.anonExists {
sg.queries[queryID(&h, v.Name, "anon")] = query{ai: v, qt: qt}
}
sg.queries[queryID(&h, v.Name, "user")] = &query{ai: v, qt: qt}
sg.queries[queryID(&h, v.Name, "anon")] = &query{ai: v, qt: qt}

case qcode.QTMutation:
for _, role := range sg.conf.Roles {
sg.queries[queryID(&h, v.Name, role.Name)] = query{ai: v, qt: qt}
sg.queries[queryID(&h, v.Name, role.Name)] = &query{ai: v, qt: qt}
}
}
}
@ -115,7 +113,7 @@ func (sg *SuperGraph) prepareRoleStmt() error {

io.WriteString(w, `(SELECT (CASE`)
for _, role := range sg.conf.Roles {
if len(role.Match) == 0 {
if role.Match == "" {
continue
}
io.WriteString(w, ` WHEN `)
@ -161,7 +159,7 @@ func (sg *SuperGraph) initAllowList() error {
}

// nolint: errcheck
func queryID(h *maphash.Hash, name string, role string) uint64 {
func queryID(h *maphash.Hash, name, role string) uint64 {
h.WriteString(name)
h.WriteString(role)
v := h.Sum64()
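queryID above hashes the query name plus role with hash/maphash, and maphash output depends on the Seed, so the same sg.hashSeed has to be installed on every Hash that computes these IDs or later lookups into sg.queries would not match the keys written in initPrepared. A small standalone sketch of that property (queryID copied from the hunk above; the rest is illustrative, not the SuperGraph wiring):

package main

import (
	"fmt"
	"hash/maphash"
)

// nolint: errcheck
func queryID(h *maphash.Hash, name, role string) uint64 {
	h.WriteString(name)
	h.WriteString(role)
	v := h.Sum64()
	h.Reset()
	return v
}

func main() {
	seed := maphash.MakeSeed() // plays the role of sg.hashSeed

	var w maphash.Hash // hash used while building the map
	w.SetSeed(seed)
	var r maphash.Hash // hash used later at lookup time
	r.SetSeed(seed)

	ids := map[uint64]string{queryID(&w, "getProducts", "user"): "allow-list entry"}

	// Same seed => same ID => the prepared query is found.
	fmt.Println(ids[queryID(&r, "getProducts", "user")])

	// A differently seeded hash produces a different ID and misses.
	var other maphash.Hash
	other.SetSeed(maphash.MakeSeed())
	_, ok := ids[queryID(&other, "getProducts", "user")]
	fmt.Println(ok)
}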
@ -238,7 +238,7 @@ func (sg *SuperGraph) parentFieldIds(h *maphash.Hash, sel []qcode.Select, skippe
return fm, sm
}

func isSkipped(n uint32, pos uint32) bool {
func isSkipped(n, pos uint32) bool {
return ((n & (1 << pos)) != 0)
}
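isSkipped above is a plain bitmask test: bit pos of n appears to mark whether the select at that position was skipped. A quick illustration with invented values:

package main

import "fmt"

func isSkipped(n, pos uint32) bool {
	return (n & (1 << pos)) != 0
}

func main() {
	var skipped uint32
	skipped |= 1 << 2 // mark position 2 as skipped

	fmt.Println(isSkipped(skipped, 2)) // true
	fmt.Println(isSkipped(skipped, 3)) // false
}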
@ -46,7 +46,7 @@ func (sg *SuperGraph) initRemotes(t Table) error {

// if no table column specified in the config then
// use the primary key of the table as the id
if len(idcol) == 0 {
if idcol == "" {
pcol, err := sg.pc.IDColumn(t.Name)
if err != nil {
return err

@ -3,7 +3,7 @@ package core
import "hash/maphash"

// nolint: errcheck
func mkkey(h *maphash.Hash, k1 string, k2 string) uint64 {
func mkkey(h *maphash.Hash, k1, k2 string) uint64 {
h.WriteString(k1)
h.WriteString(k2)
v := h.Sum64()
@ -155,7 +155,7 @@ func cmdVersion(cmd *cobra.Command, args []string) {
}

func BuildDetails() string {
if len(version) == 0 {
if version == "" {
return `
Super Graph (unknown version)
For documentation, visit https://supergraph.dev

@ -88,6 +88,10 @@ func cmdNew(cmd *cobra.Command, args []string) {
}
})

ifNotExists(path.Join(appConfigPath, "allow.list"), func(p string) error {
return ioutil.WriteFile(p, []byte{}, 0644)
})

// Create app migrations folder and add relevant files

appMigrationsPath := path.Join(appConfigPath, "migrations")
@ -80,7 +80,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[string]string) map[string]interface{} {
ct := context.Background()

if v, ok := opt["user_id"]; ok && len(v) != 0 {
if v, ok := opt["user_id"]; ok && v != "" {
ct = context.WithValue(ct, core.UserIDKey, v)
}

@ -144,7 +144,7 @@ func (c *csvSource) Values() ([]interface{}, error) {

for _, v := range c.rows[c.i] {
switch {
case len(v) == 0:
case v == "":
vals = append(vals, "")
case isDigit(v):
var n int
@ -243,7 +243,7 @@ func avatarURL(size int) string {
return fmt.Sprintf("https://i.pravatar.cc/%d?%d", size, rand.Intn(5000))
}

func imageURL(width int, height int) string {
func imageURL(width, height int) string {
return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
}
@ -90,7 +90,7 @@ func newViper(configPath, configFile string) *viper.Viper {
}

func GetConfigName() string {
if len(os.Getenv("GO_ENV")) == 0 {
if os.Getenv("GO_ENV") == "" {
return "dev"
}

@ -105,7 +105,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
}

if err == nil {
if len(conf.CacheControl) != 0 && res.Operation() == core.OpQuery {
if conf.CacheControl != "" && res.Operation() == core.OpQuery {
w.Header().Set("Cache-Control", conf.CacheControl)
}
//nolint: errcheck
@ -47,17 +47,17 @@ func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
ctx := r.Context()

userIDProvider := r.Header.Get("X-User-ID-Provider")
if len(userIDProvider) != 0 {
if userIDProvider != "" {
ctx = context.WithValue(ctx, core.UserIDProviderKey, userIDProvider)
}

userID := r.Header.Get("X-User-ID")
if len(userID) != 0 {
if userID != "" {
ctx = context.WithValue(ctx, core.UserIDKey, userID)
}

userRole := r.Header.Get("X-User-Role")
if len(userRole) != 0 {
if userRole != "" {
ctx = context.WithValue(ctx, core.UserRoleKey, userRole)
}

@ -68,11 +68,11 @@ func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
hdr := ac.Header

if len(hdr.Name) == 0 {
if hdr.Name == "" {
return nil, fmt.Errorf("auth '%s': no header.name defined", ac.Name)
}

if !hdr.Exists && len(hdr.Value) == 0 {
if !hdr.Exists && hdr.Value == "" {
return nil, fmt.Errorf("auth '%s': no header.value defined", ac.Name)
}

@ -82,7 +82,7 @@ func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {

switch {
case hdr.Exists:
fo1 = (len(value) == 0)
fo1 = (value == "")

default:
fo1 = (value != hdr.Value)
@ -44,10 +44,10 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
publicKeyFile := ac.JWT.PubKeyFile

switch {
case len(secret) != 0:
case secret != "":
key = []byte(secret)

case len(publicKeyFile) != 0:
case publicKeyFile != "":
kd, err := ioutil.ReadFile(publicKeyFile)
if err != nil {
return nil, err
@ -74,7 +74,7 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {

var tok string

if len(cookie) != 0 {
if cookie != "" {
ck, err := r.Cookie(cookie)
if err != nil {
next.ServeHTTP(w, r)

@ -165,7 +165,7 @@ func railsAuth(ac *Auth) (*rails.Auth, error) {
}

version := ac.Rails.Version
if len(version) == 0 {
if version == "" {
return nil, errors.New("no auth.rails.version defined")
}
@ -199,7 +199,7 @@ func (m *Migrator) LoadMigrations(path string) error {
for _, v := range strings.Split(upSQL, "\n") {
// Only account for regular single line comment, empty line and space/comment combination
cleanString := strings.TrimSpace(v)
if len(cleanString) != 0 &&
if cleanString != "" &&
!strings.HasPrefix(cleanString, "--") {
containsSQL = true
break
File diff suppressed because one or more lines are too long
@ -27,7 +27,7 @@ func initWatcher() {
}

var d dir
if len(cpath) == 0 || cpath == "./" {
if cpath == "" || cpath == "./" {
d = Dir("./config", ReExec)
} else {
d = Dir(cpath, ReExec)
@ -52,11 +52,11 @@ func startHTTP() {
hp := strings.SplitN(conf.HostPort, ":", 2)

if len(hp) == 2 {
if len(conf.Host) != 0 {
if conf.Host != "" {
hp[0] = conf.Host
}

if len(conf.Port) != 0 {
if conf.Port != "" {
hp[1] = conf.Port
}

@ -64,7 +64,7 @@ func startHTTP() {
}
}

if len(conf.hostPort) == 0 {
if conf.hostPort == "" {
conf.hostPort = defaultHP
}

@ -123,7 +123,7 @@ func routeHandler() (http.Handler, error) {
return mux, nil
}

if len(conf.APIPath) != 0 {
if conf.APIPath != "" {
apiRoute = path.Join("/", conf.APIPath, "/v1/graphql")
}
@ -134,7 +134,7 @@ database:
type: postgres
host: db
port: 5432
dbname: {{- .AppNameSlug -}}_development
dbname: {{ .AppNameSlug -}}_development
user: postgres
password: postgres
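The dbname change above (and the matching one in the production template below) is a Go text/template whitespace fix: a leading "{{-" trims the whitespace before the action, so the old template glued the value onto the key, while "{{ .AppNameSlug -}}" keeps the space after the colon. A quick standalone check, with the template strings copied from the hunk and an invented app name:

package main

import (
	"os"
	"text/template"
)

func main() {
	data := struct{ AppNameSlug string }{AppNameSlug: "myapp"}

	before := template.Must(template.New("before").Parse("dbname: {{- .AppNameSlug -}}_development\n"))
	after := template.Must(template.New("after").Parse("dbname: {{ .AppNameSlug -}}_development\n"))

	before.Execute(os.Stdout, data) // dbname:myapp_development
	after.Execute(os.Stdout, data)  // dbname: myapp_development
}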
@ -82,7 +82,7 @@ database:
type: postgres
host: db
port: 5432
dbname: {{- .AppNameSlug -}}_production
dbname: {{ .AppNameSlug -}}_production
user: postgres
password: postgres
#pool_size: 10