Compare commits

...

7 Commits

27 changed files with 758 additions and 480 deletions

View File

@ -85,12 +85,11 @@ type SuperGraph struct {
allowList *allow.List
encKey [32]byte
hashSeed maphash.Seed
queries map[uint64]query
queries map[uint64]*query
roles map[string]*Role
getRole *sql.Stmt
rmap map[uint64]resolvFn
abacEnabled bool
anonExists bool
qc *qcode.Compiler
pc *psql.Compiler
ge *graphql.Engine
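
Note: the change from map[uint64]query to map[uint64]*query matters because the stored entry is mutated lazily. A minimal sketch of the pattern, assuming (as the q.Do call in resolvePreparedSQL further down suggests) that the query type embeds a sync.Once; all names here are illustrative:

package main

import (
    "fmt"
    "sync"
)

// query stands in for the assumed core type: it embeds sync.Once so the
// prepared statement can be built lazily, exactly once.
type query struct {
    sync.Once
    sd string // stands in for the prepared *sql.Stmt
}

func main() {
    queries := map[uint64]*query{1: {}}

    q := queries[1] // a pointer, so Do() and sd mutate the shared entry
    q.Do(func() { q.sd = "prepared" })

    fmt.Println(queries[1].sd) // "prepared", visible to every later lookup
}

// With map[uint64]query the lookup copies the value, so the sync.Once state
// and the lazily-built statement would be lost on the next lookup.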

View File

@ -1,41 +0,0 @@
INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core
BenchmarkGraphQL-16 INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
105048 10398 ns/op 18342 B/op 55 allocs/op
PASS
ok github.com/dosco/super-graph/core 1.328s
PASS
ok github.com/dosco/super-graph/core/internal/allow 0.088s
? github.com/dosco/super-graph/core/internal/crypto [no test files]
? github.com/dosco/super-graph/core/internal/integration_tests [no test files]
PASS
ok github.com/dosco/super-graph/core/internal/integration_tests/cockroachdb 0.121s
PASS
ok github.com/dosco/super-graph/core/internal/integration_tests/postgresql 0.118s
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/psql
BenchmarkCompile-16 79845 14428 ns/op 4584 B/op 39 allocs/op
BenchmarkCompileParallel-16 326205 3918 ns/op 4633 B/op 39 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/psql 2.696s
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16 146953 8049 ns/op 3756 B/op 28 allocs/op
BenchmarkQCompileP-16 475936 2447 ns/op 3790 B/op 28 allocs/op
BenchmarkParse-16 140811 8163 ns/op 3902 B/op 18 allocs/op
BenchmarkParseP-16 571345 2041 ns/op 3903 B/op 18 allocs/op
BenchmarkSchemaParse-16 230715 5012 ns/op 3968 B/op 57 allocs/op
BenchmarkSchemaParseP-16 802426 1565 ns/op 3968 B/op 57 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/qcode 8.427s
? github.com/dosco/super-graph/core/internal/util [no test files]

View File

@ -72,6 +72,7 @@ type Config struct {
type Table struct {
Name string
Table string
Type string
Blocklist []string
Remotes []Remote
Columns []Column
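
A sketch of how the new Type field could be used in a config, based on the addTables switch further down ("json"/"jsonb" vs "polymorphic"); the table and column names are hypothetical, not taken from the repo's examples:

package main

// Column and Table are illustrative stand-ins whose fields mirror the diff.
type Column struct {
    Name       string
    ForeignKey string
}

type Table struct {
    Name      string
    Table     string
    Type      string // "", "json", "jsonb" or "polymorphic"
    Blocklist []string
    Columns   []Column
}

func main() {
    _ = []Table{
        // a jsonb column on products exposed as its own table
        {Name: "attributes", Table: "products", Type: "jsonb",
            Columns: []Column{{Name: "id"}, {Name: "color"}}},
        // a polymorphic ("virtual") table: the id column's foreign key is
        // written as "<type column>.<foreign key column>"
        {Name: "subject", Table: "notifications", Type: "polymorphic",
            Columns: []Column{{Name: "subject_id", ForeignKey: "subject_type.id"}}},
    }
}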

View File

@ -172,14 +172,15 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
h := maphash.Hash{}
h.SetSeed(c.sg.hashSeed)
id := queryID(&h, c.res.name, role)
q, ok := c.sg.queries[queryID(&h, c.res.name, role)]
q, ok := c.sg.queries[id]
if !ok {
return nil, nil, errNotFound
}
if q.sd == nil {
q.Do(func() { c.sg.prepare(&q, role) })
q.Do(func() { c.sg.prepare(q, role) })
if q.err != nil {
return nil, nil, err
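
The hunk above computes the maphash-based id once and reuses it for the lookup, and passes the *query pointer into prepare. A small self-contained sketch of that hashing pattern (the queryID body here is a stand-in, not the repository's implementation):

package main

import (
    "fmt"
    "hash/maphash"
)

// queryID hashes a query name and role into a single map key.
func queryID(h *maphash.Hash, name, role string) uint64 {
    h.Reset()
    h.WriteString(name)
    h.WriteString(role)
    return h.Sum64()
}

func main() {
    h := maphash.Hash{}
    h.SetSeed(maphash.MakeSeed()) // the real code reuses sg.hashSeed

    id := queryID(&h, "getProducts", "anon") // hashed once...
    queries := map[uint64]string{id: "SELECT 1"}
    fmt.Println(queries[id]) // ...and the same id reused for the lookup
}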

View File

@ -21,7 +21,7 @@ func (sg *SuperGraph) initConfig() error {
for i := 0; i < len(c.Tables); i++ {
t := &c.Tables[i]
t.Name = flect.Pluralize(strings.ToLower(t.Name))
// t.Name = flect.Pluralize(strings.ToLower(t.Name))
if _, ok := tm[t.Name]; ok {
sg.conf.Tables = append(c.Tables[:i], c.Tables[i+1:]...)
@ -100,21 +100,26 @@ func getDBTableAliases(c *Config) map[string][]string {
for i := range c.Tables {
t := c.Tables[i]
if len(t.Table) == 0 || len(t.Columns) != 0 {
continue
}
if t.Table != "" && t.Type == "" {
m[t.Table] = append(m[t.Table], t.Name)
}
}
return m
}
func addTables(c *Config, di *psql.DBInfo) error {
var err error
for _, t := range c.Tables {
if t.Table == "" || len(t.Columns) == 0 {
continue
switch t.Type {
case "json", "jsonb":
err = addJsonTable(di, t.Columns, t)
case "polymorphic":
err = addVirtualTable(di, t.Columns, t)
}
if err := addTable(di, t.Columns, t); err != nil {
if err != nil {
return err
}
@ -122,17 +127,18 @@ func addTables(c *Config, di *psql.DBInfo) error {
return nil
}
func addTable(di *psql.DBInfo, cols []Column, t Table) error {
func addJsonTable(di *psql.DBInfo, cols []Column, t Table) error {
// This is for jsonb columns that want to be tables.
bc, ok := di.GetColumn(t.Table, t.Name)
if !ok {
return fmt.Errorf(
"Column '%s' not found on table '%s'",
"json table: column '%s' not found on table '%s'",
t.Name, t.Table)
}
if bc.Type != "json" && bc.Type != "jsonb" {
return fmt.Errorf(
"Column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
"json table: column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
t.Name, t.Table, bc.Type)
}
@ -159,8 +165,38 @@ func addTable(di *psql.DBInfo, cols []Column, t Table) error {
return nil
}
func addVirtualTable(di *psql.DBInfo, cols []Column, t Table) error {
if len(cols) == 0 {
return fmt.Errorf("polymorphic table: no id column specified")
}
c := cols[0]
if c.ForeignKey == "" {
return fmt.Errorf("polymorphic table: no 'related_to' specified on id column")
}
s := strings.SplitN(c.ForeignKey, ".", 2)
if len(s) != 2 {
return fmt.Errorf("polymorphic table: foreign key must be <type column>.<foreign key column>")
}
di.VTables = append(di.VTables, psql.VirtualTable{
Name: t.Name,
IDColumn: c.Name,
TypeColumn: s[0],
FKeyColumn: s[1],
})
return nil
}
func addForeignKeys(c *Config, di *psql.DBInfo) error {
for _, t := range c.Tables {
if t.Type != "" {
continue
}
for _, c := range t.Columns {
if c.ForeignKey == "" {
continue
@ -174,30 +210,52 @@ func addForeignKeys(c *Config, di *psql.DBInfo) error {
}
func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
c1, ok := di.GetColumn(t.Name, c.Name)
var tn string
if t.Type == "polymorphic" {
tn = t.Table
} else {
tn = t.Name
}
c1, ok := di.GetColumn(tn, c.Name)
if !ok {
return fmt.Errorf(
"Invalid table '%s' or column '%s' in Config",
t.Name, c.Name)
"config: invalid table '%s' or column '%s' defined",
tn, c.Name)
}
v := strings.SplitN(c.ForeignKey, ".", 2)
if len(v) != 2 {
return fmt.Errorf(
"Invalid foreign_key in Config for table '%s' and column '%s",
t.Name, c.Name)
"config: invalid foreign_key defined for table '%s' and column '%s': %s",
tn, c.Name, c.ForeignKey)
}
// check if it's a polymorphic foreign key
if _, ok := di.GetColumn(tn, v[0]); ok {
c2, ok := di.GetColumn(tn, v[1])
if !ok {
return fmt.Errorf(
"config: invalid column '%s' for polymorphic relationship on table '%s' and column '%s'",
v[1], tn, c.Name)
}
c1.FKeyTable = v[0]
c1.FKeyColID = []int16{c2.ID}
return nil
}
fkt, fkc := v[0], v[1]
c2, ok := di.GetColumn(fkt, fkc)
c3, ok := di.GetColumn(fkt, fkc)
if !ok {
return fmt.Errorf(
"Invalid foreign_key in Config for table '%s' and column '%s",
t.Name, c.Name)
"config: foreign_key for table '%s' and column '%s' points to unknown table '%s' and column '%s'",
t.Name, c.Name, v[0], v[1])
}
c1.FKeyTable = fkt
c1.FKeyColID = []int16{c2.ID}
c1.FKeyColID = []int16{c3.ID}
return nil
}
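
For reference, a runnable sketch of how addVirtualTable decomposes the id column's foreign key into the psql.VirtualTable fields; the "subject"/"notifications" names are hypothetical:

package main

import (
    "fmt"
    "strings"
)

// VirtualTable mirrors the struct added to core/internal/psql.
type VirtualTable struct {
    Name       string
    IDColumn   string
    TypeColumn string
    FKeyColumn string
}

func main() {
    fk := "subject_type.id" // "<type column>.<foreign key column>"
    s := strings.SplitN(fk, ".", 2)

    vt := VirtualTable{
        Name:       "subject",    // t.Name
        IDColumn:   "subject_id", // cols[0].Name
        TypeColumn: s[0],         // "subject_type"
        FKeyColumn: s[1],         // "id"
    }
    fmt.Printf("%+v\n", vt)
}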

View File

@ -299,9 +299,13 @@ func (al *List) save(item Item) error {
for _, v := range list {
if v.Comment != "" {
f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Comment))
_, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Comment))
} else {
f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Name))
_, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Name))
}
if err != nil {
return err
}
if v.Vars != "" {
@ -320,18 +324,6 @@ func (al *List) save(item Item) error {
return nil
}
func matchPrefix(b []byte, i int, s string) bool {
if (len(b) - i) < len(s) {
return false
}
for n := 0; n < len(s); n++ {
if b[(i+n)] != s[n] {
return false
}
}
return true
}
func QueryName(b string) string {
state, s := 0, 0

View File

@ -156,15 +156,16 @@ func (co *Compiler) compileQueryWithMetadata(
if id < closeBlock {
sel := &c.s[id]
if len(sel.Cols) == 0 {
continue
}
ti, err := c.schema.GetTable(sel.Name)
if err != nil {
return c.md, err
}
if sel.Type != qcode.STUnion {
if len(sel.Cols) == 0 {
continue
}
if sel.ParentID == -1 {
io.WriteString(c.w, `(`)
} else {
@ -178,16 +179,17 @@ func (co *Compiler) compileQueryWithMetadata(
if err := c.renderSelect(sel, ti, vars); err != nil {
return c.md, err
}
}
for _, cid := range sel.Children {
if hasBit(c.md.skipped, uint32(cid)) {
continue
}
child := &c.s[cid]
if child.SkipRender {
continue
}
st.Push(child.ID + closeBlock)
st.Push(child.ID)
}
@ -195,6 +197,7 @@ func (co *Compiler) compileQueryWithMetadata(
} else {
sel := &c.s[(id - closeBlock)]
if sel.Type != qcode.STUnion {
ti, err := c.schema.GetTable(sel.Name)
if err != nil {
return c.md, err
@ -218,12 +221,13 @@ func (co *Compiler) compileQueryWithMetadata(
} else {
c.renderLateralJoinClose(sel)
}
}
if sel.Type != qcode.STMember {
if len(sel.Args) != 0 {
i := 0
for _, v := range sel.Args {
qcode.FreeNode(v, 500)
i++
qcode.FreeNode(v)
}
}
}
}
@ -361,6 +365,16 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
c.md.skipped |= (1 << uint(id))
}
case RelPolymorphic:
if _, ok := colmap[rel.Left.Col]; !ok {
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Left.Col})
colmap[rel.Left.Col] = struct{}{}
}
if _, ok := colmap[rel.Right.Table]; !ok {
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Right.Table, FieldName: rel.Right.Table})
colmap[rel.Right.Table] = struct{}{}
}
default:
return nil, fmt.Errorf("unknown relationship %s", rel)
}
@ -439,14 +453,22 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
var rel *DBRel
var err error
// Relationships must be between union parents and their parents
if sel.ParentID != -1 {
parent := c.s[sel.ParentID]
if sel.Type == qcode.STMember && sel.UParentID != -1 {
cn := c.s[sel.ParentID].Name
pn := c.s[sel.UParentID].Name
rel, err = c.schema.GetRel(cn, pn)
} else {
pn := c.s[sel.ParentID].Name
rel, err = c.schema.GetRel(ti.Name, pn)
}
}
rel, err = c.schema.GetRel(ti.Name, parent.Name)
if err != nil {
return err
}
}
childCols, err := c.initSelect(sel, ti, vars)
if err != nil {
@ -531,30 +553,27 @@ func (c *compilerContext) renderJoin(sel *qcode.Select, ti *DBTableInfo) error {
}
func (c *compilerContext) renderJoinByName(table, parent string, id int32) error {
rel, err := c.schema.GetRel(table, parent)
if err != nil {
return err
}
rel, _ := c.schema.GetRel(table, parent)
// This join is only required for one-to-many relations since
// these make use of join tables that need to be pulled in.
if rel.Type != RelOneToManyThrough {
return err
if rel == nil || rel.Type != RelOneToManyThrough {
return nil
}
pt, err := c.schema.GetTable(parent)
if err != nil {
return err
}
// pt, err := c.schema.GetTable(parent)
// if err != nil {
// return err
// }
//fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`,
//rel.Through, rel.Through, rel.ColT, c.parent.Name, c.parent.ID, rel.Left.Col)
io.WriteString(c.w, ` LEFT OUTER JOIN "`)
io.WriteString(c.w, rel.Through)
io.WriteString(c.w, rel.Through.Table)
io.WriteString(c.w, `" ON ((`)
colWithTable(c.w, rel.Through, rel.ColT)
colWithTable(c.w, rel.Through.Table, rel.Through.ColL)
io.WriteString(c.w, `) = (`)
colWithTableID(c.w, pt.Name, id, rel.Left.Col)
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
io.WriteString(c.w, `))`)
return nil
@ -641,10 +660,33 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
continue
}
if childSel.Type == qcode.STUnion {
rel, err := c.schema.GetRel(childSel.Name, ti.Name)
if err != nil {
return err
}
io.WriteString(c.w, `(CASE `)
for _, uid := range childSel.Children {
unionSel := &c.s[uid]
io.WriteString(c.w, `WHEN `)
colWithTableID(c.w, ti.Name, sel.ID, rel.Right.Table)
io.WriteString(c.w, ` = `)
squoted(c.w, unionSel.Name)
io.WriteString(c.w, ` THEN `)
io.WriteString(c.w, `"__sj_`)
int32String(c.w, unionSel.ID)
io.WriteString(c.w, `"."json"`)
}
io.WriteString(c.w, `END)`)
alias(c.w, childSel.FieldName)
} else {
io.WriteString(c.w, `"__sj_`)
int32String(c.w, childSel.ID)
io.WriteString(c.w, `"."json"`)
alias(c.w, childSel.FieldName)
}
if childSel.Paging.Type != qcode.PtOffset {
io.WriteString(c.w, `, "__sj_`)
@ -699,7 +741,8 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
}
io.WriteString(c.w, ` WHERE (`)
if err := c.renderRelationship(sel, ti); err != nil {
if err := c.renderRelationship(sel, rel); err != nil {
return err
}
if isFil {
@ -813,22 +856,25 @@ func (c *compilerContext) renderCursorCTE(sel *qcode.Select) error {
return nil
}
func (c *compilerContext) renderRelationship(sel *qcode.Select, ti *DBTableInfo) error {
parent := c.s[sel.ParentID]
pti, err := c.schema.GetTable(parent.Name)
if err != nil {
return err
}
return c.renderRelationshipByName(ti.Name, pti.Name, parent.ID)
}
func (c *compilerContext) renderRelationshipByName(table, parent string, id int32) error {
func (c *compilerContext) renderRelationshipByName(table, parent string) error {
rel, err := c.schema.GetRel(table, parent)
if err != nil {
return err
}
return c.renderRelationship(nil, rel)
}
func (c *compilerContext) renderRelationship(sel *qcode.Select, rel *DBRel) error {
var pid int32
switch {
case sel == nil:
pid = int32(-1)
case sel.Type == qcode.STMember:
pid = sel.UParentID
default:
pid = sel.ParentID
}
io.WriteString(c.w, `((`)
@ -840,19 +886,19 @@ func (c *compilerContext) renderRelationshipByName(table, parent string, id int3
switch {
case !rel.Left.Array && rel.Right.Array:
colWithTable(c.w, table, rel.Left.Col)
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
io.WriteString(c.w, `) = any (`)
colWithTableID(c.w, parent, id, rel.Right.Col)
colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
case rel.Left.Array && !rel.Right.Array:
colWithTableID(c.w, parent, id, rel.Right.Col)
colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
io.WriteString(c.w, `) = any (`)
colWithTable(c.w, table, rel.Left.Col)
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
default:
colWithTable(c.w, table, rel.Left.Col)
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
io.WriteString(c.w, `) = (`)
colWithTableID(c.w, parent, id, rel.Right.Col)
colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
}
case RelOneToManyThrough:
@ -862,25 +908,34 @@ func (c *compilerContext) renderRelationshipByName(table, parent string, id int3
switch {
case !rel.Left.Array && rel.Right.Array:
colWithTable(c.w, table, rel.Left.Col)
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
io.WriteString(c.w, `) = any (`)
colWithTable(c.w, rel.Through, rel.Right.Col)
colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
case rel.Left.Array && !rel.Right.Array:
colWithTable(c.w, rel.Through, rel.Right.Col)
colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
io.WriteString(c.w, `) = any (`)
colWithTable(c.w, table, rel.Left.Col)
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
default:
colWithTable(c.w, table, rel.Left.Col)
colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
io.WriteString(c.w, `) = (`)
colWithTable(c.w, rel.Through, rel.Right.Col)
colWithTable(c.w, rel.Right.Table, rel.Right.Col)
}
case RelEmbedded:
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
io.WriteString(c.w, `) = (`)
colWithTableID(c.w, parent, id, rel.Left.Col)
colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)
case RelPolymorphic:
colWithTable(c.w, sel.Name, rel.Right.Col)
io.WriteString(c.w, `) = (`)
colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)
io.WriteString(c.w, `) AND (`)
colWithTableID(c.w, rel.Left.Table, pid, rel.Right.Table)
io.WriteString(c.w, `) = (`)
squoted(c.w, sel.Name)
}
io.WriteString(c.w, `))`)
@ -993,7 +1048,7 @@ func (c *compilerContext) renderNestedWhere(ex *qcode.Exp, ti *DBTableInfo) erro
io.WriteString(c.w, ` WHERE `)
if err := c.renderRelationshipByName(cti.Name, ti.Name, -1); err != nil {
if err := c.renderRelationshipByName(cti.Name, ti.Name); err != nil {
return err
}
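
The renderJoinColumns hunk above emits a CASE expression that picks the right child json based on the parent's type column. A standalone sketch of that shape, building a string instead of writing to the compiler's io.Writer; table, column and selection ids are illustrative:

package main

import (
    "fmt"
    "strings"
)

// buildPolymorphicCase mimics the loop over a union selection's children:
// one WHEN arm per member type, each pointing at that member's "__sj_N" join.
func buildPolymorphicCase(parentTable string, parentID int32, typeCol string, members map[string]int32) string {
    var b strings.Builder
    b.WriteString("(CASE ")
    for name, id := range members {
        fmt.Fprintf(&b, `WHEN "%s_%d"."%s" = '%s' THEN "__sj_%d"."json" `,
            parentTable, parentID, typeCol, name, id)
    }
    b.WriteString("END)")
    return b.String()
}

func main() {
    fmt.Println(buildPolymorphicCase("notifications", 0, "subject_type",
        map[string]int32{"users": 1}))
    // (CASE WHEN "notifications_0"."subject_type" = 'users' THEN "__sj_1"."json" END)
}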

View File

@ -381,6 +381,26 @@ func withFragment3(t *testing.T) {
compileGQLToPSQL(t, gql, nil, "anon")
}
func withInlineFragment(t *testing.T) {
gql := `
query {
users {
... on users {
id
email
}
created_at
... on user {
first_name
last_name
}
}
}
`
compileGQLToPSQL(t, gql, nil, "anon")
}
func withCursor(t *testing.T) {
gql := `query {
Products(
@ -477,6 +497,7 @@ func TestCompileQuery(t *testing.T) {
t.Run("withFragment1", withFragment1)
t.Run("withFragment2", withFragment2)
t.Run("withFragment3", withFragment3)
//t.Run("withInlineFragment", withInlineFragment)
t.Run("jsonColumnAsTable", jsonColumnAsTable)
t.Run("withCursor", withCursor)
t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)

View File

@ -11,6 +11,7 @@ type DBSchema struct {
ver int
t map[string]*DBTableInfo
rm map[string]map[string]*DBRel
vt map[string]*VirtualTable
fm map[string]*DBFunction
}
@ -33,14 +34,18 @@ const (
RelOneToOne RelType = iota + 1
RelOneToMany
RelOneToManyThrough
RelPolymorphic
RelEmbedded
RelRemote
)
type DBRel struct {
Type RelType
Through string
ColT string
Through struct {
Table string
ColL string
ColR string
}
Left struct {
col *DBColumn
Table string
@ -60,6 +65,7 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
ver: info.Version,
t: make(map[string]*DBTableInfo),
rm: make(map[string]map[string]*DBRel),
vt: make(map[string]*VirtualTable),
fm: make(map[string]*DBFunction, len(info.Functions)),
}
@ -70,6 +76,10 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
}
}
if err := schema.virtualRels(info.VTables); err != nil {
return nil, err
}
for i, t := range info.Tables {
err := schema.firstDegreeRels(t, info.Columns[i])
if err != nil {
@ -102,7 +112,7 @@ func (s *DBSchema) addTable(
singular := flect.Singularize(t.Key)
plural := flect.Pluralize(t.Key)
s.t[singular] = &DBTableInfo{
ts := &DBTableInfo{
Name: t.Name,
Type: t.Type,
IsSingular: true,
@ -112,8 +122,9 @@ func (s *DBSchema) addTable(
Singular: singular,
Plural: plural,
}
s.t[singular] = ts
s.t[plural] = &DBTableInfo{
tp := &DBTableInfo{
Name: t.Name,
Type: t.Type,
IsSingular: false,
@ -123,14 +134,15 @@ func (s *DBSchema) addTable(
Singular: singular,
Plural: plural,
}
s.t[plural] = tp
if al, ok := aliases[t.Key]; ok {
for i := range al {
k1 := flect.Singularize(al[i])
s.t[k1] = s.t[singular]
s.t[k1] = ts
k2 := flect.Pluralize(al[i])
s.t[k2] = s.t[plural]
s.t[k2] = tp
}
}
@ -154,6 +166,54 @@ func (s *DBSchema) addTable(
return nil
}
func (s *DBSchema) virtualRels(vts []VirtualTable) error {
for _, vt := range vts {
s.vt[vt.Name] = &vt
for _, t := range s.t {
idCol, ok := t.ColMap[vt.IDColumn]
if !ok {
continue
}
if _, ok = t.ColMap[vt.TypeColumn]; !ok {
continue
}
nt := DBTable{
ID: -1,
Name: vt.Name,
Key: strings.ToLower(vt.Name),
Type: "virtual",
}
if err := s.addTable(nt, nil, nil); err != nil {
return err
}
rel := &DBRel{Type: RelPolymorphic}
rel.Left.col = idCol
rel.Left.Table = t.Name
rel.Left.Col = idCol.Name
rcol := DBColumn{
Name: vt.FKeyColumn,
Key: strings.ToLower(vt.FKeyColumn),
Type: idCol.Type,
}
rel.Right.col = &rcol
rel.Right.Table = vt.TypeColumn
rel.Right.Col = rcol.Name
if err := s.SetRel(vt.Name, t.Name, rel); err != nil {
return err
}
}
}
return nil
}
func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
ct := t.Key
cti, ok := s.t[ct]
@ -164,7 +224,7 @@ func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
for i := range cols {
c := cols[i]
if len(c.FKeyTable) == 0 {
if c.FKeyTable == "" {
continue
}
@ -344,16 +404,17 @@ func (s *DBSchema) updateSchemaOTMT(
// One-to-many-through relation between 1nd foreign key table and the
// 2nd foreign key table
rel1 := &DBRel{Type: RelOneToManyThrough}
rel1.Through = ti.Name
rel1.ColT = col2.Name
rel1.Through.Table = ti.Name
rel1.Through.ColL = col1.Name
rel1.Through.ColR = col2.Name
rel1.Left.col = &col2
rel1.Left.Table = col2.FKeyTable
rel1.Left.Col = fc2.Name
rel1.Left.col = fc1
rel1.Left.Table = col1.FKeyTable
rel1.Left.Col = fc1.Name
rel1.Right.col = &col1
rel1.Right.Table = ti.Name
rel1.Right.Col = col1.Name
rel1.Right.col = fc2
rel1.Right.Table = t2
rel1.Right.Col = fc2.Name
if err := s.SetRel(t1, t2, rel1); err != nil {
return err
@ -362,16 +423,17 @@ func (s *DBSchema) updateSchemaOTMT(
// One-to-many-through relation between 2nd foreign key table and the
// 1nd foreign key table
rel2 := &DBRel{Type: RelOneToManyThrough}
rel2.Through = ti.Name
rel2.ColT = col1.Name
rel2.Through.Table = ti.Name
rel2.Through.ColL = col2.Name
rel2.Through.ColR = col1.Name
rel1.Left.col = fc1
rel2.Left.Table = col1.FKeyTable
rel2.Left.Col = fc1.Name
rel2.Left.col = fc2
rel2.Left.Table = col2.FKeyTable
rel2.Left.Col = fc2.Name
rel1.Right.col = &col2
rel2.Right.Table = ti.Name
rel2.Right.Col = col2.Name
rel2.Right.col = fc1
rel2.Right.Table = t1
rel2.Right.Col = fc1.Name
if err := s.SetRel(t2, t1, rel2); err != nil {
return err

View File

@ -14,14 +14,18 @@ func (rt RelType) String() string {
return "remote"
case RelEmbedded:
return "embedded"
case RelPolymorphic:
return "polymorphic"
}
return ""
}
func (re *DBRel) String() string {
if re.Type == RelOneToManyThrough {
return fmt.Sprintf("'%s.%s' --(Through: %s)--> '%s.%s'",
re.Left.Table, re.Left.Col, re.Through, re.Right.Table, re.Right.Col)
return fmt.Sprintf("'%s.%s' --(%s.%s, %s.%s)--> '%s.%s'",
re.Left.Table, re.Left.Col,
re.Through.Table, re.Through.ColL, re.Through.Table, re.Through.ColR,
re.Right.Table, re.Right.Col)
}
return fmt.Sprintf("'%s.%s' --(%s)--> '%s.%s'",
re.Left.Table, re.Left.Col, re.Type, re.Right.Table, re.Right.Col)
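
With the Through struct carrying both join columns, the String() output for a one-to-many-through relation now names the left and right columns of the join table. A sketch of the resulting format with hypothetical tables (purchases linking customers to products):

package main

import "fmt"

func main() {
    // Same layout as the new DBRel.String() for RelOneToManyThrough.
    fmt.Printf("'%s.%s' --(%s.%s, %s.%s)--> '%s.%s'\n",
        "customers", "id",
        "purchases", "customer_id", "purchases", "product_id",
        "products", "id")
    // 'customers.id' --(purchases.customer_id, purchases.product_id)--> 'products.id'
}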

View File

@ -14,9 +14,17 @@ type DBInfo struct {
Tables []DBTable
Columns [][]DBColumn
Functions []DBFunction
VTables []VirtualTable
colMap map[string]map[string]*DBColumn
}
type VirtualTable struct {
Name string
IDColumn string
TypeColumn string
FKeyColumn string
}
func GetDBInfo(db *sql.DB, schema string) (*DBInfo, error) {
di := &DBInfo{}
var version string

View File

@ -8,8 +8,8 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "produc
=== RUN TestCompileInsert/simpleInsertWithPresets
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, $2 :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertManyToMany
WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToMany
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToOne
@ -20,7 +20,7 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileInsert (0.02s)
--- PASS: TestCompileInsert (0.03s)
--- PASS: TestCompileInsert/simpleInsert (0.00s)
--- PASS: TestCompileInsert/singleInsert (0.00s)
--- PASS: TestCompileInsert/bulkInsert (0.00s)
@ -67,9 +67,9 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
=== RUN TestCompileQuery/oneToManyArray
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/manyToMany
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/manyToManyReverse
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products"."id")) WHERE ((("purchases"."customer_id") = ("customers"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunction
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunctionBlockedByCol
@ -85,7 +85,7 @@ SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT t
=== RUN TestCompileQuery/withWhereOnRelations
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/multiRoot
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment1
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment2
@ -146,8 +146,8 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" S
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = $2 :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileUpdate (0.02s)
@ -160,4 +160,4 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALU
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
PASS
ok github.com/dosco/super-graph/core/internal/psql 0.374s
ok github.com/dosco/super-graph/core/internal/psql 0.323s

View File

@ -0,0 +1,13 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16 118282 9686 ns/op 4031 B/op 30 allocs/op
BenchmarkQCompileP-16 427531 2710 ns/op 4077 B/op 30 allocs/op
BenchmarkQCompileFragment-16 140588 8328 ns/op 8903 B/op 13 allocs/op
BenchmarkParse-16 131396 9212 ns/op 4175 B/op 18 allocs/op
BenchmarkParseP-16 503778 2310 ns/op 4176 B/op 18 allocs/op
BenchmarkParseFragment-16 143725 8158 ns/op 10193 B/op 9 allocs/op
BenchmarkSchemaParse-16 240609 5060 ns/op 3968 B/op 57 allocs/op
BenchmarkSchemaParseP-16 785116 1534 ns/op 3968 B/op 57 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/qcode 11.092s

View File

@ -1,7 +1,6 @@
package qcode
import (
"encoding/binary"
"errors"
"fmt"
"hash/maphash"
@ -35,7 +34,8 @@ const (
NodeVar
)
type SelectionSet struct {
type Operation struct {
Type parserType
Name string
Args []Arg
argsA [10]Arg
@ -43,11 +43,6 @@ type SelectionSet struct {
fieldsA [10]Field
}
type Operation struct {
Type parserType
SelectionSet
}
var zeroOperation = Operation{}
func (o *Operation) Reset() {
@ -57,7 +52,8 @@ func (o *Operation) Reset() {
type Fragment struct {
Name string
On string
SelectionSet
Fields []Field
fieldsA [10]Field
}
var zeroFragment = Fragment{}
@ -75,11 +71,13 @@ type Field struct {
argsA [5]Arg
Children []int32
childrenA [5]int32
Union bool
}
type Arg struct {
Name string
Val *Node
df bool
}
type Node struct {
@ -158,9 +156,11 @@ func Parse(gql []byte) (*Operation, error) {
if p.peek(itemFragment) {
p.ignore()
if err = p.parseFragment(op); err != nil {
if f, err := p.parseFragment(); err != nil {
fragPool.Put(f)
return nil, err
}
} else {
if !qf && p.peek(itemQuery, itemMutation, itemSub, itemObjOpen) {
s = p.pos
@ -175,40 +175,47 @@ func Parse(gql []byte) (*Operation, error) {
return nil, err
}
for _, v := range p.frags {
fragPool.Put(v)
}
return op, nil
}
func (p *Parser) parseFragment(op *Operation) error {
func (p *Parser) parseFragment() (*Fragment, error) {
var err error
frag := fragPool.Get().(*Fragment)
frag.Reset()
frag.Fields = frag.fieldsA[:0]
frag.Args = frag.argsA[:0]
if p.peek(itemName) {
frag.Name = p.val(p.next())
} else {
return frag, errors.New("fragment: missing name")
}
if p.peek(itemOn) {
p.ignore()
} else {
return errors.New("fragment: missing 'on' keyword")
return frag, errors.New("fragment: missing 'on' keyword")
}
if p.peek(itemName) {
frag.On = p.vall(p.next())
} else {
return errors.New("fragment: missing table name after 'on' keyword")
return frag, errors.New("fragment: missing table name after 'on' keyword")
}
if p.peek(itemObjOpen) {
p.ignore()
} else {
return fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
return frag, fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
}
if err := p.parseSelectionSet(&frag.SelectionSet); err != nil {
return fmt.Errorf("fragment: %v", err)
frag.Fields, err = p.parseFields(frag.Fields)
if err != nil {
return frag, fmt.Errorf("fragment: %v", err)
}
if p.frags == nil {
@ -221,7 +228,7 @@ func (p *Parser) parseFragment(op *Operation) error {
p.frags[k] = frag
return nil
return frag, nil
}
func (p *Parser) parseOp(op *Operation) error {
@ -249,7 +256,7 @@ func (p *Parser) parseOp(op *Operation) error {
break
}
err = p.parseSelectionSet(&op.SelectionSet)
op.Fields, err = p.parseFields(op.Fields)
if err != nil {
return fmt.Errorf("%s: %v", op.Type, err)
}
@ -293,17 +300,6 @@ func (p *Parser) parseOpTypeAndArgs(op *Operation) error {
return nil
}
func (p *Parser) parseSelectionSet(selset *SelectionSet) error {
var err error
selset.Fields, err = p.parseFields(selset.Fields)
if err != nil {
return err
}
return nil
}
func ParseArgValue(argVal string) (*Node, error) {
l := lexPool.Get().(*lexer)
l.Reset()
@ -324,14 +320,13 @@ func ParseArgValue(argVal string) (*Node, error) {
}
func (p *Parser) parseFields(fields []Field) ([]Field, error) {
var err error
st := NewStack()
if !p.peek(itemName, itemSpread) {
return nil, fmt.Errorf("unexpected token: %s", p.peekNext())
}
// fm := make(map[uint64]struct{})
for {
if p.peek(itemEOF) {
p.ignore()
@ -360,71 +355,28 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
isFrag = true
}
if !p.peek(itemName) {
if isFrag {
return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
fields, err = p.parseFragmentFields(st, fields)
} else {
fields, err = p.parseNormalFields(st, fields)
}
if err != nil {
return nil, err
}
}
return fields, nil
}
func (p *Parser) parseNormalFields(st *Stack, fields []Field) ([]Field, error) {
if !p.peek(itemName) {
return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
}
}
var f *Field
if isFrag {
name := p.val(p.next())
p.h.WriteString(name)
k := p.h.Sum64()
p.h.Reset()
fr, ok := p.frags[k]
if !ok {
return nil, fmt.Errorf("no fragment named '%s' defined", name)
}
n := int32(len(fields))
fields = append(fields, fr.Fields...)
for i := int(n); i < len(fields); i++ {
f := &fields[i]
f.ID = int32(i)
// var name string
// if f.Alias != "" {
// name = f.Alias
// } else {
// name = f.Name
// }
// if _, ok := fm[name]; ok {
// continue
// } else {
// fm[name] = struct{}{}
// }
// If this is the top-level point the parent to the parent of the
// previous field.
if f.ParentID == -1 {
pid := st.Peek()
f.ParentID = pid
if f.ParentID != -1 {
fields[pid].Children = append(fields[f.ParentID].Children, f.ID)
}
// Update all the other parents id's by our new place in this new array
} else {
f.ParentID += n
}
// Update all the children which is needed.
for j := range f.Children {
f.Children[j] += n
}
}
} else {
fields = append(fields, Field{ID: int32(len(fields))})
f = &fields[(len(fields) - 1)]
f := &fields[(len(fields) - 1)]
f.Args = f.argsA[:0]
f.Children = f.childrenA[:0]
@ -433,20 +385,6 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
return nil, err
}
// var name string
// if f.Alias != "" {
// name = f.Alias
// } else {
// name = f.Name
// }
// if _, ok := fm[name]; ok {
// continue
// } else {
// fm[name] = struct{}{}
// }
if st.Len() == 0 {
f.ParentID = -1
} else {
@ -454,7 +392,6 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
f.ParentID = pid
fields[pid].Children = append(fields[pid].Children, f.ID)
}
}
// After the first opening curly bracket
// come the columns or child fields
@ -462,6 +399,83 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
p.ignore()
st.Push(f.ID)
}
return fields, nil
}
func (p *Parser) parseFragmentFields(st *Stack, fields []Field) ([]Field, error) {
var err error
pid := st.Peek()
if p.peek(itemOn) {
p.ignore()
fields[pid].Union = true
if fields, err = p.parseNormalFields(st, fields); err != nil {
return nil, err
}
// If the parent is a union selector then copy over args from the parent
// to the first child which is the root selector for each union type.
for i := pid + 1; i < int32(len(fields)); i++ {
f := &fields[i]
if f.ParentID == pid {
f.Args = fields[pid].Args
}
}
} else {
if !p.peek(itemName) {
return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
}
name := p.val(p.next())
_, _ = p.h.WriteString(name)
id := p.h.Sum64()
p.h.Reset()
fr, ok := p.frags[id]
if !ok {
return nil, fmt.Errorf("no fragment named '%s' defined", name)
}
ff := fr.Fields
parent := &fields[pid]
n := int32(len(fields))
fields = append(fields, ff...)
for i := 0; i < len(ff); i++ {
k := (n + int32(i))
f := &fields[k]
f.ID = int32(k)
// If this is the top-level point the parent to the parent of the
// previous field.
if f.ParentID == -1 {
f.ParentID = pid
if f.ParentID != -1 {
parent.Children = append(parent.Children, f.ID)
}
// Update all the other parent IDs to reflect our new position in this array
} else {
f.ParentID += n
}
// Copy over children since fields append is not a deep copy
f.Children = make([]int32, len(f.Children))
copy(f.Children, ff[i].Children)
// Copy over args since args append is not a deep copy
f.Args = make([]Arg, len(f.Args))
copy(f.Args, ff[i].Args)
// Update all the children as needed.
for j := range f.Children {
f.Children[j] += n
}
}
}
return fields, nil
@ -702,11 +716,6 @@ func (p *Parser) ignore() {
p.pos = n
}
func (p *Parser) peekCurrent() string {
item := p.items[p.pos]
return b2s(p.input[item.pos:item.end])
}
func (p *Parser) peekNext() string {
item := p.items[p.pos+1]
return b2s(p.input[item.pos:item.end])
@ -716,16 +725,6 @@ func (p *Parser) reset(to int) {
p.pos = to
}
func (p *Parser) fHash(name string, parentID int32) uint64 {
var b []byte
binary.LittleEndian.PutUint32(b, uint32(parentID))
p.h.WriteString(name)
p.h.Write(b)
v := p.h.Sum64()
p.h.Reset()
return v
}
func b2s(b []byte) string {
return *(*string)(unsafe.Pointer(&b))
}
@ -762,31 +761,6 @@ func (t parserType) String() string {
return v
}
// type Frees struct {
// n *Node
// loc int
// }
// var freeList []Frees
// func FreeNode(n *Node, loc int) {
// j := -1
// for i := range freeList {
// if n == freeList[i].n {
// j = i
// break
// }
// }
// if j == -1 {
// nodePool.Put(n)
// freeList = append(freeList, Frees{n, loc})
// } else {
// fmt.Printf("(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
// }
// }
func FreeNode(n *Node, loc int) {
func FreeNode(n *Node) {
nodePool.Put(n)
}

View File

@ -12,6 +12,7 @@ import (
)
type QType int
type SType int
type Action int
const (
@ -19,7 +20,8 @@ const (
)
const (
QTQuery QType = iota + 1
QTUnknown QType = iota
QTQuery
QTMutation
QTInsert
QTUpdate
@ -27,6 +29,12 @@ const (
QTUpsert
)
const (
STNone SType = iota
STUnion
STMember
)
type QCode struct {
Type QType
ActionVar string
@ -38,6 +46,8 @@ type QCode struct {
type Select struct {
ID int32
ParentID int32
UParentID int32
Type SType
Args map[string]*Node
Name string
FieldName string
@ -277,6 +287,7 @@ func (com *Compiler) Compile(query []byte, role string) (*QCode, error) {
return nil, err
}
freeNodes(op)
opPool.Put(op)
return &qc, nil
@ -371,6 +382,10 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
})
s := &selects[(len(selects) - 1)]
if field.Union {
s.Type = STUnion
}
if len(field.Alias) != 0 {
s.FieldName = field.Alias
} else {
@ -382,6 +397,11 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
} else {
p := &selects[s.ParentID]
p.Children = append(p.Children, s.ID)
if p.Type == STUnion {
s.Type = STMember
s.UParentID = p.ParentID
}
}
if skipRender {
@ -450,7 +470,6 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
}
col := Column{Name: f.Name, FieldName: fname}
s.Cols = append(s.Cols, col)
}
@ -493,50 +512,42 @@ func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
func (com *Compiler) compileArgs(qc *QCode, sel *Select, args []Arg, role string) error {
var err error
// don't free this arg either previously done or will be free'd
// in the future like in psql
var df bool
for i := range args {
arg := &args[i]
switch arg.Name {
case "id":
err, df = com.compileArgID(sel, arg)
err = com.compileArgID(sel, arg)
case "search":
err, df = com.compileArgSearch(sel, arg)
err = com.compileArgSearch(sel, arg)
case "where":
err, df = com.compileArgWhere(sel, arg, role)
err = com.compileArgWhere(sel, arg, role)
case "orderby", "order_by", "order":
err, df = com.compileArgOrderBy(sel, arg)
err = com.compileArgOrderBy(sel, arg)
case "distinct_on", "distinct":
err, df = com.compileArgDistinctOn(sel, arg)
err = com.compileArgDistinctOn(sel, arg)
case "limit":
err, df = com.compileArgLimit(sel, arg)
err = com.compileArgLimit(sel, arg)
case "offset":
err, df = com.compileArgOffset(sel, arg)
err = com.compileArgOffset(sel, arg)
case "first":
err, df = com.compileArgFirstLast(sel, arg, PtForward)
err = com.compileArgFirstLast(sel, arg, PtForward)
case "last":
err, df = com.compileArgFirstLast(sel, arg, PtBackward)
err = com.compileArgFirstLast(sel, arg, PtBackward)
case "after":
err, df = com.compileArgAfterBefore(sel, arg, PtForward)
err = com.compileArgAfterBefore(sel, arg, PtForward)
case "before":
err, df = com.compileArgAfterBefore(sel, arg, PtBackward)
}
if !df {
FreeNode(arg.Val, 5)
err = com.compileArgAfterBefore(sel, arg, PtBackward)
}
if err != nil {
@ -647,39 +658,20 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
}
}
if usePool {
st.Push(node)
for {
if st.Len() == 0 {
break
}
intf := st.Pop()
node, ok := intf.(*Node)
if !ok || node == nil {
continue
}
for i := range node.Children {
st.Push(node.Children[i])
}
FreeNode(node, 1)
}
}
return root, needsUser, nil
}
func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
if sel.ID != 0 {
return nil, false
return nil
}
if sel.Where != nil && sel.Where.Op == OpEqID {
return nil, false
return nil
}
if arg.Val.Type != NodeVar {
return argErr("id", "variable"), false
return argErr("id", "variable")
}
ex := expPool.Get().(*Exp)
@ -690,12 +682,12 @@ func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
ex.Val = arg.Val.Val
sel.Where = ex
return nil, false
return nil
}
func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error {
if arg.Val.Type != NodeVar {
return argErr("search", "variable"), false
return argErr("search", "variable")
}
ex := expPool.Get().(*Exp)
@ -710,18 +702,19 @@ func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
}
sel.Args[arg.Name] = arg.Val
arg.df = true
AddFilter(sel, ex)
return nil, true
return nil
}
func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error, bool) {
func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) error {
st := util.NewStack()
var err error
ex, nu, err := com.compileArgObj(st, arg)
if err != nil {
return err, false
return err
}
if nu && role == "anon" {
@ -729,12 +722,12 @@ func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error,
}
AddFilter(sel, ex)
return nil, true
return nil
}
func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
if arg.Val.Type != NodeObj {
return fmt.Errorf("expecting an object"), false
return fmt.Errorf("expecting an object")
}
st := util.NewStack()
@ -752,16 +745,15 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
node, ok := intf.(*Node)
if !ok || node == nil {
return fmt.Errorf("17: unexpected value %v (%t)", intf, intf), false
return fmt.Errorf("17: unexpected value %v (%t)", intf, intf)
}
if _, ok := com.bl[node.Name]; ok {
FreeNode(node, 2)
continue
}
if node.Type != NodeStr && node.Type != NodeVar {
return fmt.Errorf("expecting a string or variable"), false
return fmt.Errorf("expecting a string or variable")
}
ob := &OrderBy{}
@ -780,25 +772,24 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
case "desc_nulls_last":
ob.Order = OrderDescNullsLast
default:
return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first"), false
return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first")
}
setOrderByColName(ob, node)
sel.OrderBy = append(sel.OrderBy, ob)
FreeNode(node, 3)
}
return nil, false
return nil
}
func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error {
node := arg.Val
if _, ok := com.bl[node.Name]; ok {
return nil, false
return nil
}
if node.Type != NodeList && node.Type != NodeStr {
return fmt.Errorf("expecting a list of strings or just a string"), false
return fmt.Errorf("expecting a list of strings or just a string")
}
if node.Type == NodeStr {
@ -807,58 +798,57 @@ func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
for i := range node.Children {
sel.DistinctOn = append(sel.DistinctOn, node.Children[i].Val)
FreeNode(node.Children[i], 5)
}
return nil, false
return nil
}
func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) error {
node := arg.Val
if node.Type != NodeInt {
return argErr("limit", "number"), false
return argErr("limit", "number")
}
sel.Paging.Limit = node.Val
return nil, false
return nil
}
func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) error {
node := arg.Val
if node.Type != NodeVar {
return argErr("offset", "variable"), false
return argErr("offset", "variable")
}
sel.Paging.Offset = node.Val
return nil, false
return nil
}
func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) (error, bool) {
func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) error {
node := arg.Val
if node.Type != NodeInt {
return argErr(arg.Name, "number"), false
return argErr(arg.Name, "number")
}
sel.Paging.Type = pt
sel.Paging.Limit = node.Val
return nil, false
return nil
}
func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) (error, bool) {
func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) error {
node := arg.Val
if node.Type != NodeVar || node.Val != "cursor" {
return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name), false
return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name)
}
sel.Paging.Type = pt
sel.Paging.Cursor = true
return nil, false
return nil
}
// var zeroTrv = &trval{}
@ -1238,3 +1228,81 @@ func FreeExp(ex *Exp) {
func argErr(name, ty string) error {
return fmt.Errorf("value for argument '%s' must be a %s", name, ty)
}
func freeNodes(op *Operation) {
var st *util.Stack
fm := make(map[*Node]struct{})
for i := range op.Args {
arg := op.Args[i]
if arg.df {
continue
}
for i := range arg.Val.Children {
if st == nil {
st = util.NewStack()
}
c := arg.Val.Children[i]
if _, ok := fm[c]; !ok {
st.Push(c)
}
}
if _, ok := fm[arg.Val]; !ok {
nodePool.Put(arg.Val)
fm[arg.Val] = struct{}{}
}
}
for i := range op.Fields {
f := op.Fields[i]
for j := range f.Args {
arg := f.Args[j]
if arg.df {
continue
}
for k := range arg.Val.Children {
if st == nil {
st = util.NewStack()
}
c := arg.Val.Children[k]
if _, ok := fm[c]; !ok {
st.Push(c)
}
}
if _, ok := fm[arg.Val]; !ok {
nodePool.Put(arg.Val)
fm[arg.Val] = struct{}{}
}
}
}
if st == nil {
return
}
for {
if st.Len() == 0 {
break
}
intf := st.Pop()
node, ok := intf.(*Node)
if !ok || node == nil {
continue
}
for i := range node.Children {
st.Push(node.Children[i])
}
if _, ok := fm[node]; !ok {
nodePool.Put(node)
fm[node] = struct{}{}
}
}
}

View File

@ -29,6 +29,8 @@ func al(b byte) bool {
func (qt QType) String() string {
switch qt {
case QTUnknown:
return "unknown"
case QTQuery:
return "query"
case QTMutation:

View File

@ -64,7 +64,7 @@ func (sg *SuperGraph) initPrepared() error {
return fmt.Errorf("role query: %w", err)
}
sg.queries = make(map[uint64]query)
sg.queries = make(map[uint64]*query)
list, err := sg.allowList.Load()
if err != nil {
@ -78,19 +78,17 @@ func (sg *SuperGraph) initPrepared() error {
if len(v.Query) == 0 {
continue
}
qt := qcode.GetQType(v.Query)
switch qt {
case qcode.QTQuery:
sg.queries[queryID(&h, v.Name, "user")] = query{ai: v, qt: qt}
if sg.anonExists {
sg.queries[queryID(&h, v.Name, "anon")] = query{ai: v, qt: qt}
}
sg.queries[queryID(&h, v.Name, "user")] = &query{ai: v, qt: qt}
sg.queries[queryID(&h, v.Name, "anon")] = &query{ai: v, qt: qt}
case qcode.QTMutation:
for _, role := range sg.conf.Roles {
sg.queries[queryID(&h, v.Name, role.Name)] = query{ai: v, qt: qt}
sg.queries[queryID(&h, v.Name, role.Name)] = &query{ai: v, qt: qt}
}
}
}

View File

@ -55,6 +55,30 @@ query {
}
```
### Fragments
Fragments make it easy to build large, complex queries from small, composable, and reusable blocks.
```graphql
query {
users {
...userFields2
...userFields1
picture_url
}
}
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}
```
### Sorting
To sort or order results, just use the `order_by` argument. It can be combined with `where`, `search`, etc. to build complex queries that fit your needs.
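As a quick sketch (the `products` table and its `name` and `price` columns are borrowed from other examples in these docs, and the filter operator shown is illustrative rather than part of this change), sorting can be combined with a filter and a limit in a single query:

```graphql
query {
  products(
    # sort by price, highest first
    order_by: { price: desc },
    # keep only the more expensive products (operator name is illustrative)
    where: { price: { gt: 20 } },
    limit: 5) {
    id
    name
    price
  }
}
```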

View File

@ -4,6 +4,8 @@ title: Introduction
sidebar_label: Introduction
---
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
Super Graph is a service that instantly, and without code, gives you a high-performance, secure GraphQL API. Your GraphQL queries are auto-translated into a single fast SQL query. No more spending weeks or months writing backend API code; just write the query you need and Super Graph does the rest.
Super Graph has a rich feature set, including integration with your existing Ruby on Rails apps, joining your DB with data from remote APIs, role- and attribute-based access control, support for JWT tokens, DB migrations, seeding, and a lot more.
@ -134,3 +136,9 @@ mutation {
}
}
```
### Built-in GraphQL Editor
Quickly craft and test your queries with a full-featured GraphQL editor. Auto-complete and schema documentation are automatically available.
<img alt="Zipkin Traces" src={useBaseUrl("img/webui.jpg")} />

View File

@ -95,7 +95,7 @@ auth:
type: jwt
jwt:
# the two providers are 'auth0' and 'none'
# valid providers are auth0, firebase and none
provider: auth0
secret: abc335bfcfdb04e50db5bb0a4d67ab9
public_key_file: /secrets/public_key.pem
@ -108,6 +108,19 @@ We can get the JWT token either from the `authorization` header where we expect
For validation, a `secret` or a public key (ECDSA or RSA) is required. When using public keys, they must be in PEM format files.
### Firebase Auth
```yaml
auth:
type: jwt
jwt:
provider: firebase
audience: <firebase-project-id>
```
Firebase auth also uses JWT; the signing keys are auto-fetched from Google and used as described in the Firebase documentation. The `audience` config value needs to be set to your project ID, and everything else is taken care of for you.
### HTTP Headers
```yaml

View File

@ -0,0 +1,13 @@
---
id: webui
title: Web UI / GraphQL Editor
sidebar_label: Web UI
---
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
<img alt="Zipkin Traces" src={useBaseUrl("img/webui.jpg")} />
Super Graph comes with a built-in GraphQL editor that only runs in development. Use it to craft your queries and copy-paste them into your app once you're ready. The editor supports auto-completion and schema documentation, which makes it easy to craft and test your query in one go without knowing anything about the underlying database structure.
You can even set query variables or HTTP headers as required. To simulate an authenticated user, set the HTTP header `"X-USER-ID": 5` to the ID of the user you want to test with.
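As a minimal sketch of that flow (the `user` query and its fields are illustrative, based on the tables used elsewhere in these docs, not part of this change), you could paste a query like the one below into the editor, set `{ "id": 5 }` in the query-variables pane, and add `"X-USER-ID": 5` in the headers pane:

```graphql
# Variables pane: { "id": 5 }
# Headers pane:   { "X-USER-ID": 5 }
query {
  user(id: $id) {
    id
    full_name
    email
  }
}
```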

View File

@ -3,6 +3,7 @@ module.exports = {
Docusaurus: [
"home",
"intro",
"webui",
"start",
"why",
"graphql",

Binary file not shown (new image, 117 KiB)

View File

@ -88,6 +88,10 @@ func cmdNew(cmd *cobra.Command, args []string) {
}
})
ifNotExists(path.Join(appConfigPath, "allow.list"), func(p string) error {
return ioutil.WriteFile(p, []byte{}, 0644)
})
// Create app migrations folder and add relevant files
appMigrationsPath := path.Join(appConfigPath, "migrations")

File diff suppressed because one or more lines are too long

View File

@ -134,7 +134,7 @@ database:
type: postgres
host: db
port: 5432
dbname: {{- .AppNameSlug -}}_development
dbname: {{ .AppNameSlug -}}_development
user: postgres
password: postgres

View File

@ -82,7 +82,7 @@ database:
type: postgres
host: db
port: 5432
dbname: {{- .AppNameSlug -}}_production
dbname: {{ .AppNameSlug -}}_production
user: postgres
password: postgres
#pool_size: 10