Compare commits
No commits in common. "master" and "v0.14.12" have entirely different histories.
@ -85,11 +85,12 @@ type SuperGraph struct {
|
|||||||
allowList *allow.List
|
allowList *allow.List
|
||||||
encKey [32]byte
|
encKey [32]byte
|
||||||
hashSeed maphash.Seed
|
hashSeed maphash.Seed
|
||||||
queries map[uint64]*query
|
queries map[uint64]query
|
||||||
roles map[string]*Role
|
roles map[string]*Role
|
||||||
getRole *sql.Stmt
|
getRole *sql.Stmt
|
||||||
rmap map[uint64]resolvFn
|
rmap map[uint64]resolvFn
|
||||||
abacEnabled bool
|
abacEnabled bool
|
||||||
|
anonExists bool
|
||||||
qc *qcode.Compiler
|
qc *qcode.Compiler
|
||||||
pc *psql.Compiler
|
pc *psql.Compiler
|
||||||
ge *graphql.Engine
|
ge *graphql.Engine
|
||||||
@ -139,7 +140,7 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph,
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if conf.SecretKey != "" {
|
if len(conf.SecretKey) != 0 {
|
||||||
sk := sha256.Sum256([]byte(conf.SecretKey))
|
sk := sha256.Sum256([]byte(conf.SecretKey))
|
||||||
conf.SecretKey = ""
|
conf.SecretKey = ""
|
||||||
sg.encKey = sk
|
sg.encKey = sk
|
||||||
|
41
core/bench.11
Normal file
41
core/bench.11
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
INF roles_query not defined: attribute based access control disabled
|
||||||
|
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
|
||||||
|
goos: darwin
|
||||||
|
goarch: amd64
|
||||||
|
pkg: github.com/dosco/super-graph/core
|
||||||
|
BenchmarkGraphQL-16 INF roles_query not defined: attribute based access control disabled
|
||||||
|
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
|
||||||
|
INF roles_query not defined: attribute based access control disabled
|
||||||
|
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
|
||||||
|
INF roles_query not defined: attribute based access control disabled
|
||||||
|
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
|
||||||
|
105048 10398 ns/op 18342 B/op 55 allocs/op
|
||||||
|
PASS
|
||||||
|
ok github.com/dosco/super-graph/core 1.328s
|
||||||
|
PASS
|
||||||
|
ok github.com/dosco/super-graph/core/internal/allow 0.088s
|
||||||
|
? github.com/dosco/super-graph/core/internal/crypto [no test files]
|
||||||
|
? github.com/dosco/super-graph/core/internal/integration_tests [no test files]
|
||||||
|
PASS
|
||||||
|
ok github.com/dosco/super-graph/core/internal/integration_tests/cockroachdb 0.121s
|
||||||
|
PASS
|
||||||
|
ok github.com/dosco/super-graph/core/internal/integration_tests/postgresql 0.118s
|
||||||
|
goos: darwin
|
||||||
|
goarch: amd64
|
||||||
|
pkg: github.com/dosco/super-graph/core/internal/psql
|
||||||
|
BenchmarkCompile-16 79845 14428 ns/op 4584 B/op 39 allocs/op
|
||||||
|
BenchmarkCompileParallel-16 326205 3918 ns/op 4633 B/op 39 allocs/op
|
||||||
|
PASS
|
||||||
|
ok github.com/dosco/super-graph/core/internal/psql 2.696s
|
||||||
|
goos: darwin
|
||||||
|
goarch: amd64
|
||||||
|
pkg: github.com/dosco/super-graph/core/internal/qcode
|
||||||
|
BenchmarkQCompile-16 146953 8049 ns/op 3756 B/op 28 allocs/op
|
||||||
|
BenchmarkQCompileP-16 475936 2447 ns/op 3790 B/op 28 allocs/op
|
||||||
|
BenchmarkParse-16 140811 8163 ns/op 3902 B/op 18 allocs/op
|
||||||
|
BenchmarkParseP-16 571345 2041 ns/op 3903 B/op 18 allocs/op
|
||||||
|
BenchmarkSchemaParse-16 230715 5012 ns/op 3968 B/op 57 allocs/op
|
||||||
|
BenchmarkSchemaParseP-16 802426 1565 ns/op 3968 B/op 57 allocs/op
|
||||||
|
PASS
|
||||||
|
ok github.com/dosco/super-graph/core/internal/qcode 8.427s
|
||||||
|
? github.com/dosco/super-graph/core/internal/util [no test files]
|
@ -82,7 +82,7 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if sg.conf.RolesQuery == "" {
|
if len(sg.conf.RolesQuery) == 0 {
|
||||||
return nil, errors.New("roles_query not defined")
|
return nil, errors.New("roles_query not defined")
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -133,7 +133,7 @@ func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, e
|
|||||||
io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)
|
io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)
|
||||||
|
|
||||||
for _, s := range stmts {
|
for _, s := range stmts {
|
||||||
if s.role.Match == "" &&
|
if len(s.role.Match) == 0 &&
|
||||||
s.role.Name != "user" && s.role.Name != "anon" {
|
s.role.Name != "user" && s.role.Name != "anon" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
@ -150,7 +150,7 @@ func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, e
|
|||||||
|
|
||||||
io.WriteString(w, `(SELECT (CASE`)
|
io.WriteString(w, `(SELECT (CASE`)
|
||||||
for _, s := range stmts {
|
for _, s := range stmts {
|
||||||
if s.role.Match == "" {
|
if len(s.role.Match) == 0 {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
io.WriteString(w, ` WHEN `)
|
io.WriteString(w, ` WHEN `)
|
||||||
|
@ -72,7 +72,6 @@ type Config struct {
|
|||||||
type Table struct {
|
type Table struct {
|
||||||
Name string
|
Name string
|
||||||
Table string
|
Table string
|
||||||
Type string
|
|
||||||
Blocklist []string
|
Blocklist []string
|
||||||
Remotes []Remote
|
Remotes []Remote
|
||||||
Columns []Column
|
Columns []Column
|
||||||
@ -152,7 +151,7 @@ type Delete struct {
|
|||||||
|
|
||||||
// AddRoleTable function is a helper function to make it easy to add per-table
|
// AddRoleTable function is a helper function to make it easy to add per-table
|
||||||
// row-level config
|
// row-level config
|
||||||
func (c *Config) AddRoleTable(role, table string, conf interface{}) error {
|
func (c *Config) AddRoleTable(role string, table string, conf interface{}) error {
|
||||||
var r *Role
|
var r *Role
|
||||||
|
|
||||||
for i := range c.Roles {
|
for i := range c.Roles {
|
||||||
|
@ -172,15 +172,14 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
|
|||||||
|
|
||||||
h := maphash.Hash{}
|
h := maphash.Hash{}
|
||||||
h.SetSeed(c.sg.hashSeed)
|
h.SetSeed(c.sg.hashSeed)
|
||||||
id := queryID(&h, c.res.name, role)
|
|
||||||
|
|
||||||
q, ok := c.sg.queries[id]
|
q, ok := c.sg.queries[queryID(&h, c.res.name, role)]
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil, nil, errNotFound
|
return nil, nil, errNotFound
|
||||||
}
|
}
|
||||||
|
|
||||||
if q.sd == nil {
|
if q.sd == nil {
|
||||||
q.Do(func() { c.sg.prepare(q, role) })
|
q.Do(func() { c.sg.prepare(&q, role) })
|
||||||
|
|
||||||
if q.err != nil {
|
if q.err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
@ -305,7 +304,7 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
|
|||||||
err = row.Scan(&root)
|
err = row.Scan(&root)
|
||||||
}
|
}
|
||||||
|
|
||||||
if role == "" {
|
if len(role) == 0 {
|
||||||
c.res.role = c.role
|
c.res.role = c.role
|
||||||
} else {
|
} else {
|
||||||
c.res.role = role
|
c.res.role = role
|
||||||
|
98
core/init.go
98
core/init.go
@ -21,7 +21,7 @@ func (sg *SuperGraph) initConfig() error {
|
|||||||
|
|
||||||
for i := 0; i < len(c.Tables); i++ {
|
for i := 0; i < len(c.Tables); i++ {
|
||||||
t := &c.Tables[i]
|
t := &c.Tables[i]
|
||||||
// t.Name = flect.Pluralize(strings.ToLower(t.Name))
|
t.Name = flect.Pluralize(strings.ToLower(t.Name))
|
||||||
|
|
||||||
if _, ok := tm[t.Name]; ok {
|
if _, ok := tm[t.Name]; ok {
|
||||||
sg.conf.Tables = append(c.Tables[:i], c.Tables[i+1:]...)
|
sg.conf.Tables = append(c.Tables[:i], c.Tables[i+1:]...)
|
||||||
@ -100,26 +100,21 @@ func getDBTableAliases(c *Config) map[string][]string {
|
|||||||
for i := range c.Tables {
|
for i := range c.Tables {
|
||||||
t := c.Tables[i]
|
t := c.Tables[i]
|
||||||
|
|
||||||
if t.Table != "" && t.Type == "" {
|
if len(t.Table) == 0 || len(t.Columns) != 0 {
|
||||||
m[t.Table] = append(m[t.Table], t.Name)
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
m[t.Table] = append(m[t.Table], t.Name)
|
||||||
}
|
}
|
||||||
return m
|
return m
|
||||||
}
|
}
|
||||||
|
|
||||||
func addTables(c *Config, di *psql.DBInfo) error {
|
func addTables(c *Config, di *psql.DBInfo) error {
|
||||||
var err error
|
|
||||||
|
|
||||||
for _, t := range c.Tables {
|
for _, t := range c.Tables {
|
||||||
switch t.Type {
|
if t.Table == "" || len(t.Columns) == 0 {
|
||||||
case "json", "jsonb":
|
continue
|
||||||
err = addJsonTable(di, t.Columns, t)
|
|
||||||
|
|
||||||
case "polymorphic":
|
|
||||||
err = addVirtualTable(di, t.Columns, t)
|
|
||||||
}
|
}
|
||||||
|
if err := addTable(di, t.Columns, t); err != nil {
|
||||||
if err != nil {
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -127,18 +122,17 @@ func addTables(c *Config, di *psql.DBInfo) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func addJsonTable(di *psql.DBInfo, cols []Column, t Table) error {
|
func addTable(di *psql.DBInfo, cols []Column, t Table) error {
|
||||||
// This is for jsonb columns that want to be tables.
|
|
||||||
bc, ok := di.GetColumn(t.Table, t.Name)
|
bc, ok := di.GetColumn(t.Table, t.Name)
|
||||||
if !ok {
|
if !ok {
|
||||||
return fmt.Errorf(
|
return fmt.Errorf(
|
||||||
"json table: column '%s' not found on table '%s'",
|
"Column '%s' not found on table '%s'",
|
||||||
t.Name, t.Table)
|
t.Name, t.Table)
|
||||||
}
|
}
|
||||||
|
|
||||||
if bc.Type != "json" && bc.Type != "jsonb" {
|
if bc.Type != "json" && bc.Type != "jsonb" {
|
||||||
return fmt.Errorf(
|
return fmt.Errorf(
|
||||||
"json table: column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
|
"Column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
|
||||||
t.Name, t.Table, bc.Type)
|
t.Name, t.Table, bc.Type)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -165,38 +159,8 @@ func addJsonTable(di *psql.DBInfo, cols []Column, t Table) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func addVirtualTable(di *psql.DBInfo, cols []Column, t Table) error {
|
|
||||||
if len(cols) == 0 {
|
|
||||||
return fmt.Errorf("polymorphic table: no id column specified")
|
|
||||||
}
|
|
||||||
|
|
||||||
c := cols[0]
|
|
||||||
|
|
||||||
if c.ForeignKey == "" {
|
|
||||||
return fmt.Errorf("polymorphic table: no 'related_to' specified on id column")
|
|
||||||
}
|
|
||||||
|
|
||||||
s := strings.SplitN(c.ForeignKey, ".", 2)
|
|
||||||
|
|
||||||
if len(s) != 2 {
|
|
||||||
return fmt.Errorf("polymorphic table: foreign key must be <type column>.<foreign key column>")
|
|
||||||
}
|
|
||||||
|
|
||||||
di.VTables = append(di.VTables, psql.VirtualTable{
|
|
||||||
Name: t.Name,
|
|
||||||
IDColumn: c.Name,
|
|
||||||
TypeColumn: s[0],
|
|
||||||
FKeyColumn: s[1],
|
|
||||||
})
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func addForeignKeys(c *Config, di *psql.DBInfo) error {
|
func addForeignKeys(c *Config, di *psql.DBInfo) error {
|
||||||
for _, t := range c.Tables {
|
for _, t := range c.Tables {
|
||||||
if t.Type != "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
for _, c := range t.Columns {
|
for _, c := range t.Columns {
|
||||||
if c.ForeignKey == "" {
|
if c.ForeignKey == "" {
|
||||||
continue
|
continue
|
||||||
@ -210,52 +174,30 @@ func addForeignKeys(c *Config, di *psql.DBInfo) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
|
func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
|
||||||
var tn string
|
c1, ok := di.GetColumn(t.Name, c.Name)
|
||||||
|
|
||||||
if t.Type == "polymorphic" {
|
|
||||||
tn = t.Table
|
|
||||||
} else {
|
|
||||||
tn = t.Name
|
|
||||||
}
|
|
||||||
|
|
||||||
c1, ok := di.GetColumn(tn, c.Name)
|
|
||||||
if !ok {
|
if !ok {
|
||||||
return fmt.Errorf(
|
return fmt.Errorf(
|
||||||
"config: invalid table '%s' or column '%s' defined",
|
"Invalid table '%s' or column '%s' in Config",
|
||||||
tn, c.Name)
|
t.Name, c.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
v := strings.SplitN(c.ForeignKey, ".", 2)
|
v := strings.SplitN(c.ForeignKey, ".", 2)
|
||||||
if len(v) != 2 {
|
if len(v) != 2 {
|
||||||
return fmt.Errorf(
|
return fmt.Errorf(
|
||||||
"config: invalid foreign_key defined for table '%s' and column '%s': %s",
|
"Invalid foreign_key in Config for table '%s' and column '%s",
|
||||||
tn, c.Name, c.ForeignKey)
|
t.Name, c.Name)
|
||||||
}
|
|
||||||
|
|
||||||
// check if it's a polymorphic foreign key
|
|
||||||
if _, ok := di.GetColumn(tn, v[0]); ok {
|
|
||||||
c2, ok := di.GetColumn(tn, v[1])
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf(
|
|
||||||
"config: invalid column '%s' for polymorphic relationship on table '%s' and column '%s'",
|
|
||||||
v[1], tn, c.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
c1.FKeyTable = v[0]
|
|
||||||
c1.FKeyColID = []int16{c2.ID}
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fkt, fkc := v[0], v[1]
|
fkt, fkc := v[0], v[1]
|
||||||
c3, ok := di.GetColumn(fkt, fkc)
|
c2, ok := di.GetColumn(fkt, fkc)
|
||||||
if !ok {
|
if !ok {
|
||||||
return fmt.Errorf(
|
return fmt.Errorf(
|
||||||
"config: foreign_key for table '%s' and column '%s' points to unknown table '%s' and column '%s'",
|
"Invalid foreign_key in Config for table '%s' and column '%s",
|
||||||
t.Name, c.Name, v[0], v[1])
|
t.Name, c.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
c1.FKeyTable = fkt
|
c1.FKeyTable = fkt
|
||||||
c1.FKeyColID = []int16{c3.ID}
|
c1.FKeyColID = []int16{c2.ID}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -146,7 +146,7 @@ func (al *List) Load() ([]Item, error) {
|
|||||||
return parse(string(b), al.filepath)
|
return parse(string(b), al.filepath)
|
||||||
}
|
}
|
||||||
|
|
||||||
func parse(b, filename string) ([]Item, error) {
|
func parse(b string, filename string) ([]Item, error) {
|
||||||
var items []Item
|
var items []Item
|
||||||
|
|
||||||
var s scanner.Scanner
|
var s scanner.Scanner
|
||||||
@ -299,13 +299,9 @@ func (al *List) save(item Item) error {
|
|||||||
|
|
||||||
for _, v := range list {
|
for _, v := range list {
|
||||||
if v.Comment != "" {
|
if v.Comment != "" {
|
||||||
_, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Comment))
|
f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Comment))
|
||||||
} else {
|
} else {
|
||||||
_, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Name))
|
f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Name))
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if v.Vars != "" {
|
if v.Vars != "" {
|
||||||
@ -324,6 +320,18 @@ func (al *List) save(item Item) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func matchPrefix(b []byte, i int, s string) bool {
|
||||||
|
if (len(b) - i) < len(s) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for n := 0; n < len(s); n++ {
|
||||||
|
if b[(i+n)] != s[n] {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
func QueryName(b string) string {
|
func QueryName(b string) string {
|
||||||
state, s := 0, 0
|
state, s := 0, 0
|
||||||
|
|
||||||
|
@ -14,7 +14,7 @@ func TestGQLName1(t *testing.T) {
|
|||||||
|
|
||||||
name := QueryName(q)
|
name := QueryName(q)
|
||||||
|
|
||||||
if name != "" {
|
if len(name) != 0 {
|
||||||
t.Fatal("Name should be empty, not ", name)
|
t.Fatal("Name should be empty, not ", name)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -156,16 +156,15 @@ func (co *Compiler) compileQueryWithMetadata(
|
|||||||
if id < closeBlock {
|
if id < closeBlock {
|
||||||
sel := &c.s[id]
|
sel := &c.s[id]
|
||||||
|
|
||||||
|
if len(sel.Cols) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
ti, err := c.schema.GetTable(sel.Name)
|
ti, err := c.schema.GetTable(sel.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return c.md, err
|
return c.md, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if sel.Type != qcode.STUnion {
|
|
||||||
if len(sel.Cols) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if sel.ParentID == -1 {
|
if sel.ParentID == -1 {
|
||||||
io.WriteString(c.w, `(`)
|
io.WriteString(c.w, `(`)
|
||||||
} else {
|
} else {
|
||||||
@ -179,17 +178,16 @@ func (co *Compiler) compileQueryWithMetadata(
|
|||||||
if err := c.renderSelect(sel, ti, vars); err != nil {
|
if err := c.renderSelect(sel, ti, vars); err != nil {
|
||||||
return c.md, err
|
return c.md, err
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
for _, cid := range sel.Children {
|
for _, cid := range sel.Children {
|
||||||
if hasBit(c.md.skipped, uint32(cid)) {
|
if hasBit(c.md.skipped, uint32(cid)) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
child := &c.s[cid]
|
child := &c.s[cid]
|
||||||
|
|
||||||
if child.SkipRender {
|
if child.SkipRender {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
st.Push(child.ID + closeBlock)
|
st.Push(child.ID + closeBlock)
|
||||||
st.Push(child.ID)
|
st.Push(child.ID)
|
||||||
}
|
}
|
||||||
@ -197,7 +195,6 @@ func (co *Compiler) compileQueryWithMetadata(
|
|||||||
} else {
|
} else {
|
||||||
sel := &c.s[(id - closeBlock)]
|
sel := &c.s[(id - closeBlock)]
|
||||||
|
|
||||||
if sel.Type != qcode.STUnion {
|
|
||||||
ti, err := c.schema.GetTable(sel.Name)
|
ti, err := c.schema.GetTable(sel.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return c.md, err
|
return c.md, err
|
||||||
@ -221,13 +218,12 @@ func (co *Compiler) compileQueryWithMetadata(
|
|||||||
} else {
|
} else {
|
||||||
c.renderLateralJoinClose(sel)
|
c.renderLateralJoinClose(sel)
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
if sel.Type != qcode.STMember {
|
|
||||||
if len(sel.Args) != 0 {
|
if len(sel.Args) != 0 {
|
||||||
|
i := 0
|
||||||
for _, v := range sel.Args {
|
for _, v := range sel.Args {
|
||||||
qcode.FreeNode(v)
|
qcode.FreeNode(v, 500)
|
||||||
}
|
i++
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -365,16 +361,6 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
|
|||||||
c.md.skipped |= (1 << uint(id))
|
c.md.skipped |= (1 << uint(id))
|
||||||
}
|
}
|
||||||
|
|
||||||
case RelPolymorphic:
|
|
||||||
if _, ok := colmap[rel.Left.Col]; !ok {
|
|
||||||
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Left.Col})
|
|
||||||
colmap[rel.Left.Col] = struct{}{}
|
|
||||||
}
|
|
||||||
if _, ok := colmap[rel.Right.Table]; !ok {
|
|
||||||
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Right.Table, FieldName: rel.Right.Table})
|
|
||||||
colmap[rel.Right.Table] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return nil, fmt.Errorf("unknown relationship %s", rel)
|
return nil, fmt.Errorf("unknown relationship %s", rel)
|
||||||
}
|
}
|
||||||
@ -453,22 +439,14 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
|
|||||||
var rel *DBRel
|
var rel *DBRel
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
// Relationships must be between union parents and their parents
|
|
||||||
if sel.ParentID != -1 {
|
if sel.ParentID != -1 {
|
||||||
if sel.Type == qcode.STMember && sel.UParentID != -1 {
|
parent := c.s[sel.ParentID]
|
||||||
cn := c.s[sel.ParentID].Name
|
|
||||||
pn := c.s[sel.UParentID].Name
|
|
||||||
rel, err = c.schema.GetRel(cn, pn)
|
|
||||||
|
|
||||||
} else {
|
|
||||||
pn := c.s[sel.ParentID].Name
|
|
||||||
rel, err = c.schema.GetRel(ti.Name, pn)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
rel, err = c.schema.GetRel(ti.Name, parent.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
childCols, err := c.initSelect(sel, ti, vars)
|
childCols, err := c.initSelect(sel, ti, vars)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -553,27 +531,30 @@ func (c *compilerContext) renderJoin(sel *qcode.Select, ti *DBTableInfo) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderJoinByName(table, parent string, id int32) error {
|
func (c *compilerContext) renderJoinByName(table, parent string, id int32) error {
|
||||||
rel, _ := c.schema.GetRel(table, parent)
|
rel, err := c.schema.GetRel(table, parent)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
// This join is only required for one-to-many relations since
|
// This join is only required for one-to-many relations since
|
||||||
// these make use of join tables that need to be pulled in.
|
// these make use of join tables that need to be pulled in.
|
||||||
if rel == nil || rel.Type != RelOneToManyThrough {
|
if rel.Type != RelOneToManyThrough {
|
||||||
return nil
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// pt, err := c.schema.GetTable(parent)
|
pt, err := c.schema.GetTable(parent)
|
||||||
// if err != nil {
|
if err != nil {
|
||||||
// return err
|
return err
|
||||||
// }
|
}
|
||||||
|
|
||||||
//fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`,
|
//fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`,
|
||||||
//rel.Through, rel.Through, rel.ColT, c.parent.Name, c.parent.ID, rel.Left.Col)
|
//rel.Through, rel.Through, rel.ColT, c.parent.Name, c.parent.ID, rel.Left.Col)
|
||||||
io.WriteString(c.w, ` LEFT OUTER JOIN "`)
|
io.WriteString(c.w, ` LEFT OUTER JOIN "`)
|
||||||
io.WriteString(c.w, rel.Through.Table)
|
io.WriteString(c.w, rel.Through)
|
||||||
io.WriteString(c.w, `" ON ((`)
|
io.WriteString(c.w, `" ON ((`)
|
||||||
colWithTable(c.w, rel.Through.Table, rel.Through.ColL)
|
colWithTable(c.w, rel.Through, rel.ColT)
|
||||||
io.WriteString(c.w, `) = (`)
|
io.WriteString(c.w, `) = (`)
|
||||||
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
|
colWithTableID(c.w, pt.Name, id, rel.Left.Col)
|
||||||
io.WriteString(c.w, `))`)
|
io.WriteString(c.w, `))`)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
@ -660,33 +641,10 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
if childSel.Type == qcode.STUnion {
|
|
||||||
rel, err := c.schema.GetRel(childSel.Name, ti.Name)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
io.WriteString(c.w, `(CASE `)
|
|
||||||
for _, uid := range childSel.Children {
|
|
||||||
unionSel := &c.s[uid]
|
|
||||||
|
|
||||||
io.WriteString(c.w, `WHEN `)
|
|
||||||
colWithTableID(c.w, ti.Name, sel.ID, rel.Right.Table)
|
|
||||||
io.WriteString(c.w, ` = `)
|
|
||||||
squoted(c.w, unionSel.Name)
|
|
||||||
io.WriteString(c.w, ` THEN `)
|
|
||||||
io.WriteString(c.w, `"__sj_`)
|
|
||||||
int32String(c.w, unionSel.ID)
|
|
||||||
io.WriteString(c.w, `"."json"`)
|
|
||||||
}
|
|
||||||
io.WriteString(c.w, `END)`)
|
|
||||||
alias(c.w, childSel.FieldName)
|
|
||||||
|
|
||||||
} else {
|
|
||||||
io.WriteString(c.w, `"__sj_`)
|
io.WriteString(c.w, `"__sj_`)
|
||||||
int32String(c.w, childSel.ID)
|
int32String(c.w, childSel.ID)
|
||||||
io.WriteString(c.w, `"."json"`)
|
io.WriteString(c.w, `"."json"`)
|
||||||
alias(c.w, childSel.FieldName)
|
alias(c.w, childSel.FieldName)
|
||||||
}
|
|
||||||
|
|
||||||
if childSel.Paging.Type != qcode.PtOffset {
|
if childSel.Paging.Type != qcode.PtOffset {
|
||||||
io.WriteString(c.w, `, "__sj_`)
|
io.WriteString(c.w, `, "__sj_`)
|
||||||
@ -741,8 +699,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
|
|||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, ` WHERE (`)
|
io.WriteString(c.w, ` WHERE (`)
|
||||||
|
if err := c.renderRelationship(sel, ti); err != nil {
|
||||||
if err := c.renderRelationship(sel, rel); err != nil {
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if isFil {
|
if isFil {
|
||||||
@ -774,7 +731,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
|
|||||||
case ti.IsSingular:
|
case ti.IsSingular:
|
||||||
io.WriteString(c.w, ` LIMIT ('1') :: integer`)
|
io.WriteString(c.w, ` LIMIT ('1') :: integer`)
|
||||||
|
|
||||||
case sel.Paging.Limit != "":
|
case len(sel.Paging.Limit) != 0:
|
||||||
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
|
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
|
||||||
io.WriteString(c.w, ` LIMIT ('`)
|
io.WriteString(c.w, ` LIMIT ('`)
|
||||||
io.WriteString(c.w, sel.Paging.Limit)
|
io.WriteString(c.w, sel.Paging.Limit)
|
||||||
@ -787,7 +744,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
|
|||||||
io.WriteString(c.w, ` LIMIT ('20') :: integer`)
|
io.WriteString(c.w, ` LIMIT ('20') :: integer`)
|
||||||
}
|
}
|
||||||
|
|
||||||
if sel.Paging.Offset != "" {
|
if len(sel.Paging.Offset) != 0 {
|
||||||
//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
|
//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
|
||||||
io.WriteString(c.w, ` OFFSET ('`)
|
io.WriteString(c.w, ` OFFSET ('`)
|
||||||
io.WriteString(c.w, sel.Paging.Offset)
|
io.WriteString(c.w, sel.Paging.Offset)
|
||||||
@ -856,24 +813,21 @@ func (c *compilerContext) renderCursorCTE(sel *qcode.Select) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderRelationshipByName(table, parent string) error {
|
func (c *compilerContext) renderRelationship(sel *qcode.Select, ti *DBTableInfo) error {
|
||||||
rel, err := c.schema.GetRel(table, parent)
|
parent := c.s[sel.ParentID]
|
||||||
|
|
||||||
|
pti, err := c.schema.GetTable(parent.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
return c.renderRelationship(nil, rel)
|
|
||||||
|
return c.renderRelationshipByName(ti.Name, pti.Name, parent.ID)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderRelationship(sel *qcode.Select, rel *DBRel) error {
|
func (c *compilerContext) renderRelationshipByName(table, parent string, id int32) error {
|
||||||
var pid int32
|
rel, err := c.schema.GetRel(table, parent)
|
||||||
|
if err != nil {
|
||||||
switch {
|
return err
|
||||||
case sel == nil:
|
|
||||||
pid = int32(-1)
|
|
||||||
case sel.Type == qcode.STMember:
|
|
||||||
pid = sel.UParentID
|
|
||||||
default:
|
|
||||||
pid = sel.ParentID
|
|
||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, `((`)
|
io.WriteString(c.w, `((`)
|
||||||
@ -886,19 +840,19 @@ func (c *compilerContext) renderRelationship(sel *qcode.Select, rel *DBRel) erro
|
|||||||
|
|
||||||
switch {
|
switch {
|
||||||
case !rel.Left.Array && rel.Right.Array:
|
case !rel.Left.Array && rel.Right.Array:
|
||||||
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
|
colWithTable(c.w, table, rel.Left.Col)
|
||||||
io.WriteString(c.w, `) = any (`)
|
io.WriteString(c.w, `) = any (`)
|
||||||
colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
|
colWithTableID(c.w, parent, id, rel.Right.Col)
|
||||||
|
|
||||||
case rel.Left.Array && !rel.Right.Array:
|
case rel.Left.Array && !rel.Right.Array:
|
||||||
colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
|
colWithTableID(c.w, parent, id, rel.Right.Col)
|
||||||
io.WriteString(c.w, `) = any (`)
|
io.WriteString(c.w, `) = any (`)
|
||||||
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
|
colWithTable(c.w, table, rel.Left.Col)
|
||||||
|
|
||||||
default:
|
default:
|
||||||
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
|
colWithTable(c.w, table, rel.Left.Col)
|
||||||
io.WriteString(c.w, `) = (`)
|
io.WriteString(c.w, `) = (`)
|
||||||
colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
|
colWithTableID(c.w, parent, id, rel.Right.Col)
|
||||||
}
|
}
|
||||||
|
|
||||||
case RelOneToManyThrough:
|
case RelOneToManyThrough:
|
||||||
@ -908,34 +862,25 @@ func (c *compilerContext) renderRelationship(sel *qcode.Select, rel *DBRel) erro
|
|||||||
|
|
||||||
switch {
|
switch {
|
||||||
case !rel.Left.Array && rel.Right.Array:
|
case !rel.Left.Array && rel.Right.Array:
|
||||||
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
|
colWithTable(c.w, table, rel.Left.Col)
|
||||||
io.WriteString(c.w, `) = any (`)
|
io.WriteString(c.w, `) = any (`)
|
||||||
colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
|
colWithTable(c.w, rel.Through, rel.Right.Col)
|
||||||
|
|
||||||
case rel.Left.Array && !rel.Right.Array:
|
case rel.Left.Array && !rel.Right.Array:
|
||||||
colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
|
colWithTable(c.w, rel.Through, rel.Right.Col)
|
||||||
io.WriteString(c.w, `) = any (`)
|
io.WriteString(c.w, `) = any (`)
|
||||||
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
|
colWithTable(c.w, table, rel.Left.Col)
|
||||||
|
|
||||||
default:
|
default:
|
||||||
colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
|
colWithTable(c.w, table, rel.Left.Col)
|
||||||
io.WriteString(c.w, `) = (`)
|
io.WriteString(c.w, `) = (`)
|
||||||
colWithTable(c.w, rel.Right.Table, rel.Right.Col)
|
colWithTable(c.w, rel.Through, rel.Right.Col)
|
||||||
}
|
}
|
||||||
|
|
||||||
case RelEmbedded:
|
case RelEmbedded:
|
||||||
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
|
colWithTable(c.w, rel.Left.Table, rel.Left.Col)
|
||||||
io.WriteString(c.w, `) = (`)
|
io.WriteString(c.w, `) = (`)
|
||||||
colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)
|
colWithTableID(c.w, parent, id, rel.Left.Col)
|
||||||
|
|
||||||
case RelPolymorphic:
|
|
||||||
colWithTable(c.w, sel.Name, rel.Right.Col)
|
|
||||||
io.WriteString(c.w, `) = (`)
|
|
||||||
colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)
|
|
||||||
io.WriteString(c.w, `) AND (`)
|
|
||||||
colWithTableID(c.w, rel.Left.Table, pid, rel.Right.Table)
|
|
||||||
io.WriteString(c.w, `) = (`)
|
|
||||||
squoted(c.w, sel.Name)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, `))`)
|
io.WriteString(c.w, `))`)
|
||||||
@ -1011,10 +956,13 @@ func (c *compilerContext) renderExp(ex *qcode.Exp, ti *DBTableInfo, skipNested b
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
} else if err := c.renderOp(val, ti); err != nil {
|
} else {
|
||||||
|
//fmt.Fprintf(w, `(("%s"."%s") `, c.sel.Name, val.Col)
|
||||||
|
if err := c.renderOp(val, ti); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
//qcode.FreeExp(val)
|
//qcode.FreeExp(val)
|
||||||
|
|
||||||
default:
|
default:
|
||||||
@ -1045,7 +993,7 @@ func (c *compilerContext) renderNestedWhere(ex *qcode.Exp, ti *DBTableInfo) erro
|
|||||||
|
|
||||||
io.WriteString(c.w, ` WHERE `)
|
io.WriteString(c.w, ` WHERE `)
|
||||||
|
|
||||||
if err := c.renderRelationshipByName(cti.Name, ti.Name); err != nil {
|
if err := c.renderRelationshipByName(cti.Name, ti.Name, -1); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1074,7 +1022,7 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if ex.Col != "" {
|
if len(ex.Col) != 0 {
|
||||||
if col, ok = ti.ColMap[ex.Col]; !ok {
|
if col, ok = ti.ColMap[ex.Col]; !ok {
|
||||||
return fmt.Errorf("no column '%s' found ", ex.Col)
|
return fmt.Errorf("no column '%s' found ", ex.Col)
|
||||||
}
|
}
|
||||||
@ -1314,7 +1262,7 @@ func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
|
|||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
func hasBit(n, pos uint32) bool {
|
func hasBit(n uint32, pos uint32) bool {
|
||||||
val := n & (1 << pos)
|
val := n & (1 << pos)
|
||||||
return (val > 0)
|
return (val > 0)
|
||||||
}
|
}
|
||||||
|
@ -381,26 +381,6 @@ func withFragment3(t *testing.T) {
|
|||||||
compileGQLToPSQL(t, gql, nil, "anon")
|
compileGQLToPSQL(t, gql, nil, "anon")
|
||||||
}
|
}
|
||||||
|
|
||||||
// func withInlineFragment(t *testing.T) {
|
|
||||||
// gql := `
|
|
||||||
// query {
|
|
||||||
// users {
|
|
||||||
// ... on users {
|
|
||||||
// id
|
|
||||||
// email
|
|
||||||
// }
|
|
||||||
// created_at
|
|
||||||
// ... on user {
|
|
||||||
// first_name
|
|
||||||
// last_name
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// `
|
|
||||||
|
|
||||||
// compileGQLToPSQL(t, gql, nil, "anon")
|
|
||||||
// }
|
|
||||||
|
|
||||||
func withCursor(t *testing.T) {
|
func withCursor(t *testing.T) {
|
||||||
gql := `query {
|
gql := `query {
|
||||||
Products(
|
Products(
|
||||||
@ -497,7 +477,6 @@ func TestCompileQuery(t *testing.T) {
|
|||||||
t.Run("withFragment1", withFragment1)
|
t.Run("withFragment1", withFragment1)
|
||||||
t.Run("withFragment2", withFragment2)
|
t.Run("withFragment2", withFragment2)
|
||||||
t.Run("withFragment3", withFragment3)
|
t.Run("withFragment3", withFragment3)
|
||||||
//t.Run("withInlineFragment", withInlineFragment)
|
|
||||||
t.Run("jsonColumnAsTable", jsonColumnAsTable)
|
t.Run("jsonColumnAsTable", jsonColumnAsTable)
|
||||||
t.Run("withCursor", withCursor)
|
t.Run("withCursor", withCursor)
|
||||||
t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)
|
t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)
|
||||||
|
@ -11,7 +11,6 @@ type DBSchema struct {
|
|||||||
ver int
|
ver int
|
||||||
t map[string]*DBTableInfo
|
t map[string]*DBTableInfo
|
||||||
rm map[string]map[string]*DBRel
|
rm map[string]map[string]*DBRel
|
||||||
vt map[string]*VirtualTable
|
|
||||||
fm map[string]*DBFunction
|
fm map[string]*DBFunction
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -34,18 +33,14 @@ const (
|
|||||||
RelOneToOne RelType = iota + 1
|
RelOneToOne RelType = iota + 1
|
||||||
RelOneToMany
|
RelOneToMany
|
||||||
RelOneToManyThrough
|
RelOneToManyThrough
|
||||||
RelPolymorphic
|
|
||||||
RelEmbedded
|
RelEmbedded
|
||||||
RelRemote
|
RelRemote
|
||||||
)
|
)
|
||||||
|
|
||||||
type DBRel struct {
|
type DBRel struct {
|
||||||
Type RelType
|
Type RelType
|
||||||
Through struct {
|
Through string
|
||||||
Table string
|
ColT string
|
||||||
ColL string
|
|
||||||
ColR string
|
|
||||||
}
|
|
||||||
Left struct {
|
Left struct {
|
||||||
col *DBColumn
|
col *DBColumn
|
||||||
Table string
|
Table string
|
||||||
@ -65,7 +60,6 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
|
|||||||
ver: info.Version,
|
ver: info.Version,
|
||||||
t: make(map[string]*DBTableInfo),
|
t: make(map[string]*DBTableInfo),
|
||||||
rm: make(map[string]map[string]*DBRel),
|
rm: make(map[string]map[string]*DBRel),
|
||||||
vt: make(map[string]*VirtualTable),
|
|
||||||
fm: make(map[string]*DBFunction, len(info.Functions)),
|
fm: make(map[string]*DBFunction, len(info.Functions)),
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -76,10 +70,6 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := schema.virtualRels(info.VTables); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, t := range info.Tables {
|
for i, t := range info.Tables {
|
||||||
err := schema.firstDegreeRels(t, info.Columns[i])
|
err := schema.firstDegreeRels(t, info.Columns[i])
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -112,7 +102,7 @@ func (s *DBSchema) addTable(
|
|||||||
singular := flect.Singularize(t.Key)
|
singular := flect.Singularize(t.Key)
|
||||||
plural := flect.Pluralize(t.Key)
|
plural := flect.Pluralize(t.Key)
|
||||||
|
|
||||||
ts := &DBTableInfo{
|
s.t[singular] = &DBTableInfo{
|
||||||
Name: t.Name,
|
Name: t.Name,
|
||||||
Type: t.Type,
|
Type: t.Type,
|
||||||
IsSingular: true,
|
IsSingular: true,
|
||||||
@ -122,9 +112,8 @@ func (s *DBSchema) addTable(
|
|||||||
Singular: singular,
|
Singular: singular,
|
||||||
Plural: plural,
|
Plural: plural,
|
||||||
}
|
}
|
||||||
s.t[singular] = ts
|
|
||||||
|
|
||||||
tp := &DBTableInfo{
|
s.t[plural] = &DBTableInfo{
|
||||||
Name: t.Name,
|
Name: t.Name,
|
||||||
Type: t.Type,
|
Type: t.Type,
|
||||||
IsSingular: false,
|
IsSingular: false,
|
||||||
@ -134,15 +123,14 @@ func (s *DBSchema) addTable(
|
|||||||
Singular: singular,
|
Singular: singular,
|
||||||
Plural: plural,
|
Plural: plural,
|
||||||
}
|
}
|
||||||
s.t[plural] = tp
|
|
||||||
|
|
||||||
if al, ok := aliases[t.Key]; ok {
|
if al, ok := aliases[t.Key]; ok {
|
||||||
for i := range al {
|
for i := range al {
|
||||||
k1 := flect.Singularize(al[i])
|
k1 := flect.Singularize(al[i])
|
||||||
s.t[k1] = ts
|
s.t[k1] = s.t[singular]
|
||||||
|
|
||||||
k2 := flect.Pluralize(al[i])
|
k2 := flect.Pluralize(al[i])
|
||||||
s.t[k2] = tp
|
s.t[k2] = s.t[plural]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -166,54 +154,6 @@ func (s *DBSchema) addTable(
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *DBSchema) virtualRels(vts []VirtualTable) error {
|
|
||||||
for _, vt := range vts {
|
|
||||||
s.vt[vt.Name] = &vt
|
|
||||||
|
|
||||||
for _, t := range s.t {
|
|
||||||
idCol, ok := t.ColMap[vt.IDColumn]
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if _, ok = t.ColMap[vt.TypeColumn]; !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
nt := DBTable{
|
|
||||||
ID: -1,
|
|
||||||
Name: vt.Name,
|
|
||||||
Key: strings.ToLower(vt.Name),
|
|
||||||
Type: "virtual",
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := s.addTable(nt, nil, nil); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
rel := &DBRel{Type: RelPolymorphic}
|
|
||||||
rel.Left.col = idCol
|
|
||||||
rel.Left.Table = t.Name
|
|
||||||
rel.Left.Col = idCol.Name
|
|
||||||
|
|
||||||
rcol := DBColumn{
|
|
||||||
Name: vt.FKeyColumn,
|
|
||||||
Key: strings.ToLower(vt.FKeyColumn),
|
|
||||||
Type: idCol.Type,
|
|
||||||
}
|
|
||||||
|
|
||||||
rel.Right.col = &rcol
|
|
||||||
rel.Right.Table = vt.TypeColumn
|
|
||||||
rel.Right.Col = rcol.Name
|
|
||||||
|
|
||||||
if err := s.SetRel(vt.Name, t.Name, rel); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
|
func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
|
||||||
ct := t.Key
|
ct := t.Key
|
||||||
cti, ok := s.t[ct]
|
cti, ok := s.t[ct]
|
||||||
@ -224,7 +164,7 @@ func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
|
|||||||
for i := range cols {
|
for i := range cols {
|
||||||
c := cols[i]
|
c := cols[i]
|
||||||
|
|
||||||
if c.FKeyTable == "" {
|
if len(c.FKeyTable) == 0 {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -328,7 +268,7 @@ func (s *DBSchema) secondDegreeRels(t DBTable, cols []DBColumn) error {
|
|||||||
for i := range cols {
|
for i := range cols {
|
||||||
c := cols[i]
|
c := cols[i]
|
||||||
|
|
||||||
if c.FKeyTable == "" {
|
if len(c.FKeyTable) == 0 {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -404,17 +344,16 @@ func (s *DBSchema) updateSchemaOTMT(
|
|||||||
// One-to-many-through relation between 1nd foreign key table and the
|
// One-to-many-through relation between 1nd foreign key table and the
|
||||||
// 2nd foreign key table
|
// 2nd foreign key table
|
||||||
rel1 := &DBRel{Type: RelOneToManyThrough}
|
rel1 := &DBRel{Type: RelOneToManyThrough}
|
||||||
rel1.Through.Table = ti.Name
|
rel1.Through = ti.Name
|
||||||
rel1.Through.ColL = col1.Name
|
rel1.ColT = col2.Name
|
||||||
rel1.Through.ColR = col2.Name
|
|
||||||
|
|
||||||
rel1.Left.col = fc1
|
rel1.Left.col = &col2
|
||||||
rel1.Left.Table = col1.FKeyTable
|
rel1.Left.Table = col2.FKeyTable
|
||||||
rel1.Left.Col = fc1.Name
|
rel1.Left.Col = fc2.Name
|
||||||
|
|
||||||
rel1.Right.col = fc2
|
rel1.Right.col = &col1
|
||||||
rel1.Right.Table = t2
|
rel1.Right.Table = ti.Name
|
||||||
rel1.Right.Col = fc2.Name
|
rel1.Right.Col = col1.Name
|
||||||
|
|
||||||
if err := s.SetRel(t1, t2, rel1); err != nil {
|
if err := s.SetRel(t1, t2, rel1); err != nil {
|
||||||
return err
|
return err
|
||||||
@ -423,17 +362,16 @@ func (s *DBSchema) updateSchemaOTMT(
|
|||||||
// One-to-many-through relation between 2nd foreign key table and the
|
// One-to-many-through relation between 2nd foreign key table and the
|
||||||
// 1nd foreign key table
|
// 1nd foreign key table
|
||||||
rel2 := &DBRel{Type: RelOneToManyThrough}
|
rel2 := &DBRel{Type: RelOneToManyThrough}
|
||||||
rel2.Through.Table = ti.Name
|
rel2.Through = ti.Name
|
||||||
rel2.Through.ColL = col2.Name
|
rel2.ColT = col1.Name
|
||||||
rel2.Through.ColR = col1.Name
|
|
||||||
|
|
||||||
rel2.Left.col = fc2
|
rel1.Left.col = fc1
|
||||||
rel2.Left.Table = col2.FKeyTable
|
rel2.Left.Table = col1.FKeyTable
|
||||||
rel2.Left.Col = fc2.Name
|
rel2.Left.Col = fc1.Name
|
||||||
|
|
||||||
rel2.Right.col = fc1
|
rel1.Right.col = &col2
|
||||||
rel2.Right.Table = t1
|
rel2.Right.Table = ti.Name
|
||||||
rel2.Right.Col = fc1.Name
|
rel2.Right.Col = col2.Name
|
||||||
|
|
||||||
if err := s.SetRel(t2, t1, rel2); err != nil {
|
if err := s.SetRel(t2, t1, rel2); err != nil {
|
||||||
return err
|
return err
|
||||||
|
@ -14,18 +14,14 @@ func (rt RelType) String() string {
|
|||||||
return "remote"
|
return "remote"
|
||||||
case RelEmbedded:
|
case RelEmbedded:
|
||||||
return "embedded"
|
return "embedded"
|
||||||
case RelPolymorphic:
|
|
||||||
return "polymorphic"
|
|
||||||
}
|
}
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
func (re *DBRel) String() string {
|
func (re *DBRel) String() string {
|
||||||
if re.Type == RelOneToManyThrough {
|
if re.Type == RelOneToManyThrough {
|
||||||
return fmt.Sprintf("'%s.%s' --(%s.%s, %s.%s)--> '%s.%s'",
|
return fmt.Sprintf("'%s.%s' --(Through: %s)--> '%s.%s'",
|
||||||
re.Left.Table, re.Left.Col,
|
re.Left.Table, re.Left.Col, re.Through, re.Right.Table, re.Right.Col)
|
||||||
re.Through.Table, re.Through.ColL, re.Through.Table, re.Through.ColR,
|
|
||||||
re.Right.Table, re.Right.Col)
|
|
||||||
}
|
}
|
||||||
return fmt.Sprintf("'%s.%s' --(%s)--> '%s.%s'",
|
return fmt.Sprintf("'%s.%s' --(%s)--> '%s.%s'",
|
||||||
re.Left.Table, re.Left.Col, re.Type, re.Right.Table, re.Right.Col)
|
re.Left.Table, re.Left.Col, re.Type, re.Right.Table, re.Right.Col)
|
||||||
|
@ -14,17 +14,9 @@ type DBInfo struct {
|
|||||||
Tables []DBTable
|
Tables []DBTable
|
||||||
Columns [][]DBColumn
|
Columns [][]DBColumn
|
||||||
Functions []DBFunction
|
Functions []DBFunction
|
||||||
VTables []VirtualTable
|
|
||||||
colMap map[string]map[string]*DBColumn
|
colMap map[string]map[string]*DBColumn
|
||||||
}
|
}
|
||||||
|
|
||||||
type VirtualTable struct {
|
|
||||||
Name string
|
|
||||||
IDColumn string
|
|
||||||
TypeColumn string
|
|
||||||
FKeyColumn string
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetDBInfo(db *sql.DB, schema string) (*DBInfo, error) {
|
func GetDBInfo(db *sql.DB, schema string) (*DBInfo, error) {
|
||||||
di := &DBInfo{}
|
di := &DBInfo{}
|
||||||
var version string
|
var version string
|
||||||
|
@ -8,8 +8,8 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "produc
|
|||||||
=== RUN TestCompileInsert/simpleInsertWithPresets
|
=== RUN TestCompileInsert/simpleInsertWithPresets
|
||||||
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, $2 :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, $2 :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertManyToMany
|
=== RUN TestCompileInsert/nestedInsertManyToMany
|
||||||
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
|
||||||
WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToMany
|
=== RUN TestCompileInsert/nestedInsertOneToMany
|
||||||
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOne
|
=== RUN TestCompileInsert/nestedInsertOneToOne
|
||||||
@ -20,7 +20,7 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("
|
|||||||
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
|
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
|
||||||
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileInsert (0.03s)
|
--- PASS: TestCompileInsert (0.02s)
|
||||||
--- PASS: TestCompileInsert/simpleInsert (0.00s)
|
--- PASS: TestCompileInsert/simpleInsert (0.00s)
|
||||||
--- PASS: TestCompileInsert/singleInsert (0.00s)
|
--- PASS: TestCompileInsert/singleInsert (0.00s)
|
||||||
--- PASS: TestCompileInsert/bulkInsert (0.00s)
|
--- PASS: TestCompileInsert/bulkInsert (0.00s)
|
||||||
@ -67,9 +67,9 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
|
|||||||
=== RUN TestCompileQuery/oneToManyArray
|
=== RUN TestCompileQuery/oneToManyArray
|
||||||
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/manyToMany
|
=== RUN TestCompileQuery/manyToMany
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/manyToManyReverse
|
=== RUN TestCompileQuery/manyToManyReverse
|
||||||
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products"."id")) WHERE ((("purchases"."customer_id") = ("customers"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/aggFunction
|
=== RUN TestCompileQuery/aggFunction
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/aggFunctionBlockedByCol
|
=== RUN TestCompileQuery/aggFunctionBlockedByCol
|
||||||
@ -85,7 +85,7 @@ SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT t
|
|||||||
=== RUN TestCompileQuery/withWhereOnRelations
|
=== RUN TestCompileQuery/withWhereOnRelations
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/multiRoot
|
=== RUN TestCompileQuery/multiRoot
|
||||||
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withFragment1
|
=== RUN TestCompileQuery/withFragment1
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withFragment2
|
=== RUN TestCompileQuery/withFragment2
|
||||||
@ -146,8 +146,8 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" S
|
|||||||
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
|
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
|
||||||
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = $2 :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = $2 :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
|
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
|
||||||
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
|
||||||
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
|
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
|
||||||
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileUpdate (0.02s)
|
--- PASS: TestCompileUpdate (0.02s)
|
||||||
@ -160,4 +160,4 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALU
|
|||||||
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
|
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
|
||||||
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
|
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
|
||||||
PASS
|
PASS
|
||||||
ok github.com/dosco/super-graph/core/internal/psql 0.323s
|
ok github.com/dosco/super-graph/core/internal/psql 0.374s
|
||||||
|
@ -121,12 +121,14 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
|
|||||||
}
|
}
|
||||||
io.WriteString(w, `)`)
|
io.WriteString(w, `)`)
|
||||||
|
|
||||||
} else if qc.Selects[0].Where != nil {
|
} else {
|
||||||
|
if qc.Selects[0].Where != nil {
|
||||||
io.WriteString(w, `WHERE `)
|
io.WriteString(w, `WHERE `)
|
||||||
if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
|
if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
io.WriteString(w, ` RETURNING `)
|
io.WriteString(w, ` RETURNING `)
|
||||||
quoted(w, ti.Name)
|
quoted(w, ti.Name)
|
||||||
|
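The hunk above reshapes renderUpdateStmt so that the closing parenthesis and the optional WHERE clause sit on separate branches, with RETURNING always written at the end. A minimal sketch of that tail, assuming a plain io.Writer; the guard on the first branch is not visible in the hunk, so closeOnly, where and table are placeholder inputs rather than the compiler's real state:

package main

import (
	"io"
	"os"
)

// Sketch of the tail of the UPDATE statement as restructured above: WHERE is
// rendered only inside the else branch when a filter is present, and
// RETURNING is always emitted afterwards.
func renderUpdateTail(w io.Writer, closeOnly bool, where, table string) {
	if closeOnly {
		io.WriteString(w, `)`)
	} else {
		// WHERE is optional now; it is skipped when no filter was compiled.
		if where != "" {
			io.WriteString(w, `WHERE `)
			io.WriteString(w, where)
		}
	}
	io.WriteString(w, ` RETURNING `)
	io.WriteString(w, `"`+table+`".*`)
}

func main() {
	renderUpdateTail(os.Stdout, false, `(("products"."id") = $2 :: bigint)`, "products")
}

Calling it with an empty where string shows the other effect of the change: the statement still ends in a RETURNING clause even when no filter is rendered.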
@ -1,13 +0,0 @@
|
|||||||
goos: darwin
|
|
||||||
goarch: amd64
|
|
||||||
pkg: github.com/dosco/super-graph/core/internal/qcode
|
|
||||||
BenchmarkQCompile-16 118282 9686 ns/op 4031 B/op 30 allocs/op
|
|
||||||
BenchmarkQCompileP-16 427531 2710 ns/op 4077 B/op 30 allocs/op
|
|
||||||
BenchmarkQCompileFragment-16 140588 8328 ns/op 8903 B/op 13 allocs/op
|
|
||||||
BenchmarkParse-16 131396 9212 ns/op 4175 B/op 18 allocs/op
|
|
||||||
BenchmarkParseP-16 503778 2310 ns/op 4176 B/op 18 allocs/op
|
|
||||||
BenchmarkParseFragment-16 143725 8158 ns/op 10193 B/op 9 allocs/op
|
|
||||||
BenchmarkSchemaParse-16 240609 5060 ns/op 3968 B/op 57 allocs/op
|
|
||||||
BenchmarkSchemaParseP-16 785116 1534 ns/op 3968 B/op 57 allocs/op
|
|
||||||
PASS
|
|
||||||
ok github.com/dosco/super-graph/core/internal/qcode 11.092s
|
|
@ -141,7 +141,8 @@ func (l *lexer) current() (Pos, Pos) {
|
|||||||
func (l *lexer) emit(t itemType) {
|
func (l *lexer) emit(t itemType) {
|
||||||
l.items = append(l.items, item{t, l.start, l.pos, l.line})
|
l.items = append(l.items, item{t, l.start, l.pos, l.line})
|
||||||
// Some items contain text internally. If so, count their newlines.
|
// Some items contain text internally. If so, count their newlines.
|
||||||
if t == itemStringVal {
|
switch t {
|
||||||
|
case itemStringVal:
|
||||||
for i := l.start; i < l.pos; i++ {
|
for i := l.start; i < l.pos; i++ {
|
||||||
if l.input[i] == '\n' {
|
if l.input[i] == '\n' {
|
||||||
l.line++
|
l.line++
|
||||||
@ -403,15 +404,15 @@ func isAlphaNumeric(r rune) bool {
|
|||||||
return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
|
return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
|
||||||
}
|
}
|
||||||
|
|
||||||
func equals(b []byte, s, e Pos, val []byte) bool {
|
func equals(b []byte, s Pos, e Pos, val []byte) bool {
|
||||||
return bytes.EqualFold(b[s:e], val)
|
return bytes.EqualFold(b[s:e], val)
|
||||||
}
|
}
|
||||||
|
|
||||||
func contains(b []byte, s, e Pos, chars string) bool {
|
func contains(b []byte, s Pos, e Pos, chars string) bool {
|
||||||
return bytes.ContainsAny(b[s:e], chars)
|
return bytes.ContainsAny(b[s:e], chars)
|
||||||
}
|
}
|
||||||
|
|
||||||
func lowercase(b []byte, s, e Pos) {
|
func lowercase(b []byte, s Pos, e Pos) {
|
||||||
for i := s; i < e; i++ {
|
for i := s; i < e; i++ {
|
||||||
if b[i] >= 'A' && b[i] <= 'Z' {
|
if b[i] >= 'A' && b[i] <= 'Z' {
|
||||||
b[i] = ('a' + (b[i] - 'A'))
|
b[i] = ('a' + (b[i] - 'A'))
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
package qcode
|
package qcode
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"encoding/binary"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"hash/maphash"
|
"hash/maphash"
|
||||||
@ -34,8 +35,7 @@ const (
|
|||||||
NodeVar
|
NodeVar
|
||||||
)
|
)
|
||||||
|
|
||||||
type Operation struct {
|
type SelectionSet struct {
|
||||||
Type parserType
|
|
||||||
Name string
|
Name string
|
||||||
Args []Arg
|
Args []Arg
|
||||||
argsA [10]Arg
|
argsA [10]Arg
|
||||||
@ -43,6 +43,11 @@ type Operation struct {
|
|||||||
fieldsA [10]Field
|
fieldsA [10]Field
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type Operation struct {
|
||||||
|
Type parserType
|
||||||
|
SelectionSet
|
||||||
|
}
|
||||||
|
|
||||||
var zeroOperation = Operation{}
|
var zeroOperation = Operation{}
|
||||||
|
|
||||||
func (o *Operation) Reset() {
|
func (o *Operation) Reset() {
|
||||||
@ -52,8 +57,7 @@ func (o *Operation) Reset() {
|
|||||||
type Fragment struct {
|
type Fragment struct {
|
||||||
Name string
|
Name string
|
||||||
On string
|
On string
|
||||||
Fields []Field
|
SelectionSet
|
||||||
fieldsA [10]Field
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var zeroFragment = Fragment{}
|
var zeroFragment = Fragment{}
|
||||||
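Taken together, the hunks above replace the separate field lists on Operation and Fragment with a shared SelectionSet that both embed. A minimal sketch of the resulting shape, with parserType, Arg and Field reduced to stand-ins and the member list following what is visible in the diff:

package qcode

type parserType int

type Arg struct{ Name string }

type Field struct{ Name string }

// SelectionSet carries the members that Operation and Fragment used to hold
// separately; both embed it now, so their fields and args go through the
// same parsing path. The backing-array sizes match the ones in the diff.
type SelectionSet struct {
	Name    string
	Args    []Arg
	argsA   [10]Arg
	Fields  []Field
	fieldsA [10]Field
}

type Operation struct {
	Type parserType
	SelectionSet
}

type Fragment struct {
	Name string
	On   string
	SelectionSet
}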
@ -71,13 +75,11 @@ type Field struct {
|
|||||||
argsA [5]Arg
|
argsA [5]Arg
|
||||||
Children []int32
|
Children []int32
|
||||||
childrenA [5]int32
|
childrenA [5]int32
|
||||||
Union bool
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type Arg struct {
|
type Arg struct {
|
||||||
Name string
|
Name string
|
||||||
Val *Node
|
Val *Node
|
||||||
df bool
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type Node struct {
|
type Node struct {
|
||||||
@ -156,11 +158,9 @@ func Parse(gql []byte) (*Operation, error) {
|
|||||||
|
|
||||||
if p.peek(itemFragment) {
|
if p.peek(itemFragment) {
|
||||||
p.ignore()
|
p.ignore()
|
||||||
if f, err := p.parseFragment(); err != nil {
|
if err = p.parseFragment(op); err != nil {
|
||||||
fragPool.Put(f)
|
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
if !qf && p.peek(itemQuery, itemMutation, itemSub, itemObjOpen) {
|
if !qf && p.peek(itemQuery, itemMutation, itemSub, itemObjOpen) {
|
||||||
s = p.pos
|
s = p.pos
|
||||||
@ -175,47 +175,40 @@ func Parse(gql []byte) (*Operation, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, v := range p.frags {
|
|
||||||
fragPool.Put(v)
|
|
||||||
}
|
|
||||||
|
|
||||||
return op, nil
|
return op, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Parser) parseFragment() (*Fragment, error) {
|
func (p *Parser) parseFragment(op *Operation) error {
|
||||||
var err error
|
|
||||||
|
|
||||||
frag := fragPool.Get().(*Fragment)
|
frag := fragPool.Get().(*Fragment)
|
||||||
frag.Reset()
|
frag.Reset()
|
||||||
|
|
||||||
frag.Fields = frag.fieldsA[:0]
|
frag.Fields = frag.fieldsA[:0]
|
||||||
|
frag.Args = frag.argsA[:0]
|
||||||
|
|
||||||
if p.peek(itemName) {
|
if p.peek(itemName) {
|
||||||
frag.Name = p.val(p.next())
|
frag.Name = p.val(p.next())
|
||||||
} else {
|
|
||||||
return frag, errors.New("fragment: missing name")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if p.peek(itemOn) {
|
if p.peek(itemOn) {
|
||||||
p.ignore()
|
p.ignore()
|
||||||
} else {
|
} else {
|
||||||
return frag, errors.New("fragment: missing 'on' keyword")
|
return errors.New("fragment: missing 'on' keyword")
|
||||||
}
|
}
|
||||||
|
|
||||||
if p.peek(itemName) {
|
if p.peek(itemName) {
|
||||||
frag.On = p.vall(p.next())
|
frag.On = p.vall(p.next())
|
||||||
} else {
|
} else {
|
||||||
return frag, errors.New("fragment: missing table name after 'on' keyword")
|
return errors.New("fragment: missing table name after 'on' keyword")
|
||||||
}
|
}
|
||||||
|
|
||||||
if p.peek(itemObjOpen) {
|
if p.peek(itemObjOpen) {
|
||||||
p.ignore()
|
p.ignore()
|
||||||
} else {
|
} else {
|
||||||
return frag, fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
|
return fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
|
||||||
}
|
}
|
||||||
|
|
||||||
frag.Fields, err = p.parseFields(frag.Fields)
|
if err := p.parseSelectionSet(&frag.SelectionSet); err != nil {
|
||||||
if err != nil {
|
return fmt.Errorf("fragment: %v", err)
|
||||||
return frag, fmt.Errorf("fragment: %v", err)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if p.frags == nil {
|
if p.frags == nil {
|
||||||
@ -228,7 +221,7 @@ func (p *Parser) parseFragment() (*Fragment, error) {
|
|||||||
|
|
||||||
p.frags[k] = frag
|
p.frags[k] = frag
|
||||||
|
|
||||||
return frag, nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Parser) parseOp(op *Operation) error {
|
func (p *Parser) parseOp(op *Operation) error {
|
||||||
@ -256,7 +249,7 @@ func (p *Parser) parseOp(op *Operation) error {
|
|||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
op.Fields, err = p.parseFields(op.Fields)
|
err = p.parseSelectionSet(&op.SelectionSet)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("%s: %v", op.Type, err)
|
return fmt.Errorf("%s: %v", op.Type, err)
|
||||||
}
|
}
|
||||||
@ -300,6 +293,17 @@ func (p *Parser) parseOpTypeAndArgs(op *Operation) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (p *Parser) parseSelectionSet(selset *SelectionSet) error {
|
||||||
|
var err error
|
||||||
|
|
||||||
|
selset.Fields, err = p.parseFields(selset.Fields)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func ParseArgValue(argVal string) (*Node, error) {
|
func ParseArgValue(argVal string) (*Node, error) {
|
||||||
l := lexPool.Get().(*lexer)
|
l := lexPool.Get().(*lexer)
|
||||||
l.Reset()
|
l.Reset()
|
||||||
@ -320,7 +324,6 @@ func ParseArgValue(argVal string) (*Node, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (p *Parser) parseFields(fields []Field) ([]Field, error) {
|
func (p *Parser) parseFields(fields []Field) ([]Field, error) {
|
||||||
var err error
|
|
||||||
st := NewStack()
|
st := NewStack()
|
||||||
|
|
||||||
if !p.peek(itemName, itemSpread) {
|
if !p.peek(itemName, itemSpread) {
|
||||||
@ -355,28 +358,61 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
|
|||||||
isFrag = true
|
isFrag = true
|
||||||
}
|
}
|
||||||
|
|
||||||
if isFrag {
|
|
||||||
fields, err = p.parseFragmentFields(st, fields)
|
|
||||||
} else {
|
|
||||||
fields, err = p.parseNormalFields(st, fields)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return fields, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *Parser) parseNormalFields(st *Stack, fields []Field) ([]Field, error) {
|
|
||||||
if !p.peek(itemName) {
|
if !p.peek(itemName) {
|
||||||
|
if isFrag {
|
||||||
|
return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
|
||||||
|
} else {
|
||||||
return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
|
return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var f *Field
|
||||||
|
|
||||||
|
if isFrag {
|
||||||
|
name := p.val(p.next())
|
||||||
|
p.h.WriteString(name)
|
||||||
|
k := p.h.Sum64()
|
||||||
|
p.h.Reset()
|
||||||
|
|
||||||
|
fr, ok := p.frags[k]
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("no fragment named '%s' defined", name)
|
||||||
|
}
|
||||||
|
|
||||||
|
n := int32(len(fields))
|
||||||
|
fields = append(fields, fr.Fields...)
|
||||||
|
|
||||||
|
for i := 0; i < len(fr.Fields); i++ {
|
||||||
|
k := (n + int32(i))
|
||||||
|
f := &fields[k]
|
||||||
|
f.ID = int32(k)
|
||||||
|
|
||||||
|
// If this is at the top level, point the parent to the parent of the
|
||||||
|
// previous field.
|
||||||
|
if f.ParentID == -1 {
|
||||||
|
pid := st.Peek()
|
||||||
|
f.ParentID = pid
|
||||||
|
if f.ParentID != -1 {
|
||||||
|
fields[pid].Children = append(fields[f.ParentID].Children, f.ID)
|
||||||
|
}
|
||||||
|
// Shift all the other parent IDs by our new offset in this array
|
||||||
|
} else {
|
||||||
|
f.ParentID += n
|
||||||
|
}
|
||||||
|
|
||||||
|
f.Children = make([]int32, len(f.Children))
|
||||||
|
copy(f.Children, fr.Fields[i].Children)
|
||||||
|
|
||||||
|
// Update all the child indexes as needed.
|
||||||
|
for j := range f.Children {
|
||||||
|
f.Children[j] += n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
} else {
|
||||||
fields = append(fields, Field{ID: int32(len(fields))})
|
fields = append(fields, Field{ID: int32(len(fields))})
|
||||||
|
|
||||||
f := &fields[(len(fields) - 1)]
|
f = &fields[(len(fields) - 1)]
|
||||||
f.Args = f.argsA[:0]
|
f.Args = f.argsA[:0]
|
||||||
f.Children = f.childrenA[:0]
|
f.Children = f.childrenA[:0]
|
||||||
|
|
||||||
@ -392,6 +428,7 @@ func (p *Parser) parseNormalFields(st *Stack, fields []Field) ([]Field, error) {
|
|||||||
f.ParentID = pid
|
f.ParentID = pid
|
||||||
fields[pid].Children = append(fields[pid].Children, f.ID)
|
fields[pid].Children = append(fields[pid].Children, f.ID)
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// After the first opening curly bracket
|
// After the first opening curly bracket
|
||||||
// come the columns or child fields
|
// come the columns or child fields
|
||||||
@ -399,80 +436,6 @@ func (p *Parser) parseNormalFields(st *Stack, fields []Field) ([]Field, error) {
|
|||||||
p.ignore()
|
p.ignore()
|
||||||
st.Push(f.ID)
|
st.Push(f.ID)
|
||||||
}
|
}
|
||||||
|
|
||||||
return fields, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *Parser) parseFragmentFields(st *Stack, fields []Field) ([]Field, error) {
|
|
||||||
var err error
|
|
||||||
pid := st.Peek()
|
|
||||||
|
|
||||||
if p.peek(itemOn) {
|
|
||||||
p.ignore()
|
|
||||||
fields[pid].Union = true
|
|
||||||
|
|
||||||
if fields, err = p.parseNormalFields(st, fields); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// If parent is a union selector than copy over args from the parent
|
|
||||||
// to the first child which is the root selector for each union type.
|
|
||||||
for i := pid + 1; i < int32(len(fields)); i++ {
|
|
||||||
f := &fields[i]
|
|
||||||
if f.ParentID == pid {
|
|
||||||
f.Args = fields[pid].Args
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
} else {
|
|
||||||
if !p.peek(itemName) {
|
|
||||||
return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
|
|
||||||
}
|
|
||||||
|
|
||||||
name := p.val(p.next())
|
|
||||||
_, _ = p.h.WriteString(name)
|
|
||||||
id := p.h.Sum64()
|
|
||||||
p.h.Reset()
|
|
||||||
|
|
||||||
fr, ok := p.frags[id]
|
|
||||||
if !ok {
|
|
||||||
return nil, fmt.Errorf("no fragment named '%s' defined", name)
|
|
||||||
}
|
|
||||||
ff := fr.Fields
|
|
||||||
|
|
||||||
n := int32(len(fields))
|
|
||||||
fields = append(fields, ff...)
|
|
||||||
|
|
||||||
for i := 0; i < len(ff); i++ {
|
|
||||||
k := (n + int32(i))
|
|
||||||
f := &fields[k]
|
|
||||||
f.ID = int32(k)
|
|
||||||
|
|
||||||
// If this is the top-level point the parent to the parent of the
|
|
||||||
// previous field.
|
|
||||||
if f.ParentID == -1 {
|
|
||||||
f.ParentID = pid
|
|
||||||
if f.ParentID != -1 {
|
|
||||||
fields[pid].Children = append(fields[pid].Children, f.ID)
|
|
||||||
}
|
|
||||||
// Update all the other parents id's by our new place in this new array
|
|
||||||
} else {
|
|
||||||
f.ParentID += n
|
|
||||||
}
|
|
||||||
|
|
||||||
// Copy over children since fields append is not a deep copy
|
|
||||||
f.Children = make([]int32, len(f.Children))
|
|
||||||
copy(f.Children, ff[i].Children)
|
|
||||||
|
|
||||||
// Copy over args since args append is not a deep copy
|
|
||||||
f.Args = make([]Arg, len(f.Args))
|
|
||||||
copy(f.Args, ff[i].Args)
|
|
||||||
|
|
||||||
// Update all the children which is needed.
|
|
||||||
for j := range f.Children {
|
|
||||||
f.Children[j] += n
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return fields, nil
|
return fields, nil
|
||||||
@ -571,9 +534,11 @@ func (p *Parser) parseList() (*Node, error) {
|
|||||||
}
|
}
|
||||||
if ty == 0 {
|
if ty == 0 {
|
||||||
ty = node.Type
|
ty = node.Type
|
||||||
} else if ty != node.Type {
|
} else {
|
||||||
|
if ty != node.Type {
|
||||||
return nil, errors.New("All values in a list must be of the same type")
|
return nil, errors.New("All values in a list must be of the same type")
|
||||||
}
|
}
|
||||||
|
}
|
||||||
node.Parent = parent
|
node.Parent = parent
|
||||||
nodes = append(nodes, node)
|
nodes = append(nodes, node)
|
||||||
}
|
}
|
||||||
@ -711,6 +676,11 @@ func (p *Parser) ignore() {
|
|||||||
p.pos = n
|
p.pos = n
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (p *Parser) peekCurrent() string {
|
||||||
|
item := p.items[p.pos]
|
||||||
|
return b2s(p.input[item.pos:item.end])
|
||||||
|
}
|
||||||
|
|
||||||
func (p *Parser) peekNext() string {
|
func (p *Parser) peekNext() string {
|
||||||
item := p.items[p.pos+1]
|
item := p.items[p.pos+1]
|
||||||
return b2s(p.input[item.pos:item.end])
|
return b2s(p.input[item.pos:item.end])
|
||||||
@ -720,6 +690,16 @@ func (p *Parser) reset(to int) {
|
|||||||
p.pos = to
|
p.pos = to
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (p *Parser) fHash(name string, parentID int32) uint64 {
|
||||||
|
var b []byte
|
||||||
|
binary.LittleEndian.PutUint32(b, uint32(parentID))
|
||||||
|
p.h.WriteString(name)
|
||||||
|
p.h.Write(b)
|
||||||
|
v := p.h.Sum64()
|
||||||
|
p.h.Reset()
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
|
||||||
func b2s(b []byte) string {
|
func b2s(b []byte) string {
|
||||||
return *(*string)(unsafe.Pointer(&b))
|
return *(*string)(unsafe.Pointer(&b))
|
||||||
}
|
}
|
||||||
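The fHash helper added above mixes a field name and its parent ID into a single 64-bit key. binary.LittleEndian.PutUint32 needs a destination of at least four bytes, so the standalone sketch below uses a fixed 4-byte buffer and a bare maphash.Hash in place of the Parser receiver:

package main

import (
	"encoding/binary"
	"fmt"
	"hash/maphash"
)

// fHash mixes a field name and its parent ID into one 64-bit key.
// Sketch only: the real parser keeps the maphash.Hash on the Parser struct.
func fHash(h *maphash.Hash, name string, parentID int32) uint64 {
	var b [4]byte // PutUint32 requires at least 4 bytes of backing storage
	binary.LittleEndian.PutUint32(b[:], uint32(parentID))
	h.WriteString(name)
	h.Write(b[:])
	v := h.Sum64()
	h.Reset()
	return v
}

func main() {
	var h maphash.Hash
	fmt.Println(fHash(&h, "products", 2))
}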
@ -756,6 +736,31 @@ func (t parserType) String() string {
|
|||||||
return v
|
return v
|
||||||
}
|
}
|
||||||
|
|
||||||
func FreeNode(n *Node) {
|
// type Frees struct {
|
||||||
|
// n *Node
|
||||||
|
// loc int
|
||||||
|
// }
|
||||||
|
|
||||||
|
// var freeList []Frees
|
||||||
|
|
||||||
|
// func FreeNode(n *Node, loc int) {
|
||||||
|
// j := -1
|
||||||
|
|
||||||
|
// for i := range freeList {
|
||||||
|
// if n == freeList[i].n {
|
||||||
|
// j = i
|
||||||
|
// break
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// if j == -1 {
|
||||||
|
// nodePool.Put(n)
|
||||||
|
// freeList = append(freeList, Frees{n, loc})
|
||||||
|
// } else {
|
||||||
|
// fmt.Printf("(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
func FreeNode(n *Node, loc int) {
|
||||||
nodePool.Put(n)
|
nodePool.Put(n)
|
||||||
}
|
}
|
||||||
|
@ -12,7 +12,6 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type QType int
|
type QType int
|
||||||
type SType int
|
|
||||||
type Action int
|
type Action int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@ -20,8 +19,7 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
QTUnknown QType = iota
|
QTQuery QType = iota + 1
|
||||||
QTQuery
|
|
||||||
QTMutation
|
QTMutation
|
||||||
QTInsert
|
QTInsert
|
||||||
QTUpdate
|
QTUpdate
|
||||||
@ -29,12 +27,6 @@ const (
|
|||||||
QTUpsert
|
QTUpsert
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
|
||||||
STNone SType = iota
|
|
||||||
STUnion
|
|
||||||
STMember
|
|
||||||
)
|
|
||||||
|
|
||||||
type QCode struct {
|
type QCode struct {
|
||||||
Type QType
|
Type QType
|
||||||
ActionVar string
|
ActionVar string
|
||||||
@ -46,8 +38,6 @@ type QCode struct {
|
|||||||
type Select struct {
|
type Select struct {
|
||||||
ID int32
|
ID int32
|
||||||
ParentID int32
|
ParentID int32
|
||||||
UParentID int32
|
|
||||||
Type SType
|
|
||||||
Args map[string]*Node
|
Args map[string]*Node
|
||||||
Name string
|
Name string
|
||||||
FieldName string
|
FieldName string
|
||||||
@@ -287,7 +277,6 @@ func (com *Compiler) Compile(query []byte, role string) (*QCode, error) {
 		return nil, err
 	}
 
-	freeNodes(op)
 	opPool.Put(op)
 
 	return &qc, nil

@@ -382,11 +371,7 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
 		})
 		s := &selects[(len(selects) - 1)]
 
-		if field.Union {
-			s.Type = STUnion
-		}
-
-		if field.Alias != "" {
+		if len(field.Alias) != 0 {
 			s.FieldName = field.Alias
 		} else {
 			s.FieldName = s.Name

@@ -397,11 +382,6 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
 		} else {
 			p := &selects[s.ParentID]
 			p.Children = append(p.Children, s.ID)
-
-			if p.Type == STUnion {
-				s.Type = STMember
-				s.UParentID = p.ParentID
-			}
 		}
 
 		if skipRender {

@@ -481,7 +461,6 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
 	}
 
 	qc.Selects = selects[:id]
-
 	return nil
 }
 
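Note: compileQuery keeps the whole selection set in one flat slice and wires a child to its parent by index (p := &selects[s.ParentID]; p.Children = append(p.Children, s.ID)). The sketch below shows that flat-tree layout with deliberately simplified types of my own; it is not the compiler's real Select struct.

package main

import "fmt"

// Nodes live in a single slice and refer to each other by index.
type sel struct {
	ID       int32
	ParentID int32 // -1 for roots
	Name     string
	Children []int32
}

func addChild(selects []sel, parentID int32, name string) []sel {
	id := int32(len(selects))
	selects = append(selects, sel{ID: id, ParentID: parentID, Name: name})
	if parentID >= 0 {
		p := &selects[parentID]
		p.Children = append(p.Children, id)
	}
	return selects
}

func main() {
	var selects []sel
	selects = addChild(selects, -1, "products")
	selects = addChild(selects, 0, "user")
	selects = addChild(selects, 0, "customers")
	fmt.Println(selects[0].Children) // [1 2]
}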
@@ -513,42 +492,50 @@ func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
 func (com *Compiler) compileArgs(qc *QCode, sel *Select, args []Arg, role string) error {
 	var err error
 
+	// don't free this arg either previously done or will be free'd
+	// in the future like in psql
+	var df bool
+
 	for i := range args {
 		arg := &args[i]
 
 		switch arg.Name {
 		case "id":
-			err = com.compileArgID(sel, arg)
+			err, df = com.compileArgID(sel, arg)
 
 		case "search":
-			err = com.compileArgSearch(sel, arg)
+			err, df = com.compileArgSearch(sel, arg)
 
 		case "where":
-			err = com.compileArgWhere(sel, arg, role)
+			err, df = com.compileArgWhere(sel, arg, role)
 
 		case "orderby", "order_by", "order":
-			err = com.compileArgOrderBy(sel, arg)
+			err, df = com.compileArgOrderBy(sel, arg)
 
 		case "distinct_on", "distinct":
-			err = com.compileArgDistinctOn(sel, arg)
+			err, df = com.compileArgDistinctOn(sel, arg)
 
 		case "limit":
-			err = com.compileArgLimit(sel, arg)
+			err, df = com.compileArgLimit(sel, arg)
 
 		case "offset":
-			err = com.compileArgOffset(sel, arg)
+			err, df = com.compileArgOffset(sel, arg)
 
 		case "first":
-			err = com.compileArgFirstLast(sel, arg, PtForward)
+			err, df = com.compileArgFirstLast(sel, arg, PtForward)
 
 		case "last":
-			err = com.compileArgFirstLast(sel, arg, PtBackward)
+			err, df = com.compileArgFirstLast(sel, arg, PtBackward)
 
 		case "after":
-			err = com.compileArgAfterBefore(sel, arg, PtForward)
+			err, df = com.compileArgAfterBefore(sel, arg, PtForward)
 
 		case "before":
-			err = com.compileArgAfterBefore(sel, arg, PtBackward)
+			err, df = com.compileArgAfterBefore(sel, arg, PtBackward)
+		}
+
+		if !df {
+			FreeNode(arg.Val, 5)
 		}
 
 		if err != nil {

@@ -629,13 +616,15 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
 		}
 
 		// Objects inside a list
-		if node.Name == "" {
+		if len(node.Name) == 0 {
 			pushChildren(st, node.exp, node)
 			continue
 
-		} else if _, ok := com.bl[node.Name]; ok {
+		} else {
+			if _, ok := com.bl[node.Name]; ok {
 			continue
 		}
+		}
 
 		ex, err := newExp(st, node, usePool)
 		if err != nil {

@@ -657,20 +646,39 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
 		}
 	}
 
+	if usePool {
+		st.Push(node)
+
+		for {
+			if st.Len() == 0 {
+				break
+			}
+			intf := st.Pop()
+			node, ok := intf.(*Node)
+			if !ok || node == nil {
+				continue
+			}
+			for i := range node.Children {
+				st.Push(node.Children[i])
+			}
+			FreeNode(node, 1)
+		}
+	}
+
 	return root, needsUser, nil
 }
 
-func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
+func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
 	if sel.ID != 0 {
-		return nil
+		return nil, false
 	}
 
 	if sel.Where != nil && sel.Where.Op == OpEqID {
-		return nil
+		return nil, false
 	}
 
 	if arg.Val.Type != NodeVar {
-		return argErr("id", "variable")
+		return argErr("id", "variable"), false
 	}
 
 	ex := expPool.Get().(*Exp)
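Note: on the v0.14.12 side compileArgNode walks the argument tree with an explicit stack and hands every node back to a pool once it has been compiled, instead of recursing. A self-contained sketch of that pattern follows; the Node type and nodePool here are simplified stand-ins, not the package's own definitions.

package main

import (
	"fmt"
	"sync"
)

type Node struct {
	Name     string
	Children []*Node
}

var nodePool = sync.Pool{New: func() interface{} { return new(Node) }}

// releaseTree returns a node and all of its descendants to the pool,
// using an explicit stack instead of recursion.
func releaseTree(root *Node) int {
	freed := 0
	stack := []*Node{root}
	for len(stack) > 0 {
		n := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if n == nil {
			continue
		}
		stack = append(stack, n.Children...)
		*n = Node{} // clear before pooling so stale children cannot leak
		nodePool.Put(n)
		freed++
	}
	return freed
}

func main() {
	root := &Node{Name: "where", Children: []*Node{{Name: "id"}, {Name: "name"}}}
	fmt.Println(releaseTree(root)) // 3
}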
@@ -681,12 +689,12 @@ func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
 	ex.Val = arg.Val.Val
 
 	sel.Where = ex
-	return nil
+	return nil, false
 }
 
-func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error {
+func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
 	if arg.Val.Type != NodeVar {
-		return argErr("search", "variable")
+		return argErr("search", "variable"), false
 	}
 
 	ex := expPool.Get().(*Exp)

@@ -701,19 +709,18 @@ func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error {
 	}
 
 	sel.Args[arg.Name] = arg.Val
-	arg.df = true
 	AddFilter(sel, ex)
 
-	return nil
+	return nil, true
 }
 
-func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) error {
+func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error, bool) {
 	st := util.NewStack()
 	var err error
 
 	ex, nu, err := com.compileArgObj(st, arg)
 	if err != nil {
-		return err
+		return err, false
 	}
 
 	if nu && role == "anon" {

@@ -721,12 +728,12 @@ func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) error {
 	}
 	AddFilter(sel, ex)
 
-	return nil
+	return nil, true
 }
 
-func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
+func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
 	if arg.Val.Type != NodeObj {
-		return fmt.Errorf("expecting an object")
+		return fmt.Errorf("expecting an object"), false
 	}
 
 	st := util.NewStack()

@@ -744,15 +751,16 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
 		node, ok := intf.(*Node)
 
 		if !ok || node == nil {
-			return fmt.Errorf("17: unexpected value %v (%t)", intf, intf)
+			return fmt.Errorf("17: unexpected value %v (%t)", intf, intf), false
 		}
 
 		if _, ok := com.bl[node.Name]; ok {
+			FreeNode(node, 2)
 			continue
 		}
 
 		if node.Type != NodeStr && node.Type != NodeVar {
-			return fmt.Errorf("expecting a string or variable")
+			return fmt.Errorf("expecting a string or variable"), false
 		}
 
 		ob := &OrderBy{}

@@ -771,24 +779,25 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
 		case "desc_nulls_last":
 			ob.Order = OrderDescNullsLast
 		default:
-			return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first")
+			return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first"), false
 		}
 
 		setOrderByColName(ob, node)
 		sel.OrderBy = append(sel.OrderBy, ob)
+		FreeNode(node, 3)
 	}
-	return nil
+	return nil, false
 }
 
-func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error {
+func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
 	node := arg.Val
 
 	if _, ok := com.bl[node.Name]; ok {
-		return nil
+		return nil, false
 	}
 
 	if node.Type != NodeList && node.Type != NodeStr {
-		return fmt.Errorf("expecting a list of strings or just a string")
+		return fmt.Errorf("expecting a list of strings or just a string"), false
 	}
 
 	if node.Type == NodeStr {

@@ -797,57 +806,58 @@ func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error {
 
 	for i := range node.Children {
 		sel.DistinctOn = append(sel.DistinctOn, node.Children[i].Val)
+		FreeNode(node.Children[i], 5)
 	}
 
-	return nil
+	return nil, false
 }
 
-func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) error {
+func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) (error, bool) {
 	node := arg.Val
 
 	if node.Type != NodeInt {
-		return argErr("limit", "number")
+		return argErr("limit", "number"), false
 	}
 
 	sel.Paging.Limit = node.Val
 
-	return nil
+	return nil, false
 }
 
-func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) error {
+func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) (error, bool) {
 	node := arg.Val
 
 	if node.Type != NodeVar {
-		return argErr("offset", "variable")
+		return argErr("offset", "variable"), false
 	}
 
 	sel.Paging.Offset = node.Val
-	return nil
+	return nil, false
 }
 
-func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) error {
+func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) (error, bool) {
	node := arg.Val
 
 	if node.Type != NodeInt {
-		return argErr(arg.Name, "number")
+		return argErr(arg.Name, "number"), false
 	}
 
 	sel.Paging.Type = pt
 	sel.Paging.Limit = node.Val
 
-	return nil
+	return nil, false
 }
 
-func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) error {
+func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) (error, bool) {
 	node := arg.Val
 
 	if node.Type != NodeVar || node.Val != "cursor" {
-		return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name)
+		return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name), false
 	}
 	sel.Paging.Type = pt
 	sel.Paging.Cursor = true
 
-	return nil
+	return nil, false
 }
 
 // var zeroTrv = &trval{}
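Note: the compileArg* helpers above translate the limit, offset, first, last, after and before arguments into a Paging value on the Select: first/last choose a direction and a limit, while after/before insist on a $cursor variable and enable cursor paging. The sketch below is a loose illustration of that mapping; PtOffset and applyPagingArg are my own names, only PtForward and PtBackward appear in the diff.

package main

import (
	"errors"
	"fmt"
)

type PagingType int

const (
	PtOffset PagingType = iota // assumed default, not shown in the diff
	PtForward
	PtBackward
)

type Paging struct {
	Type   PagingType
	Limit  string
	Offset string
	Cursor bool
}

// applyPagingArg is an illustrative version of the first/last/after/before
// handling: first/last set a limit and a direction, after/before require
// the $cursor variable and switch cursor paging on.
func applyPagingArg(p *Paging, name, val string) error {
	switch name {
	case "first":
		p.Type, p.Limit = PtForward, val
	case "last":
		p.Type, p.Limit = PtBackward, val
	case "after":
		if val != "cursor" {
			return fmt.Errorf("value for argument '%s' must be a variable named $cursor", name)
		}
		p.Type, p.Cursor = PtForward, true
	case "before":
		if val != "cursor" {
			return fmt.Errorf("value for argument '%s' must be a variable named $cursor", name)
		}
		p.Type, p.Cursor = PtBackward, true
	default:
		return errors.New("unknown paging argument")
	}
	return nil
}

func main() {
	var p Paging
	fmt.Println(applyPagingArg(&p, "first", "20"), p.Limit) // <nil> 20
}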
@@ -1048,7 +1058,7 @@ func setWhereColName(ex *Exp, node *Node) {
 		if n.Type != NodeObj {
 			continue
 		}
-		if n.Name != "" {
+		if len(n.Name) != 0 {
 			k := n.Name
 			if k == "and" || k == "or" || k == "not" ||
 				k == "_and" || k == "_or" || k == "_not" {

@@ -1227,81 +1237,3 @@ func FreeExp(ex *Exp) {
 func argErr(name, ty string) error {
 	return fmt.Errorf("value for argument '%s' must be a %s", name, ty)
 }
-
-func freeNodes(op *Operation) {
-	var st *util.Stack
-	fm := make(map[*Node]struct{})
-
-	for i := range op.Args {
-		arg := op.Args[i]
-		if arg.df {
-			continue
-		}
-
-		for i := range arg.Val.Children {
-			if st == nil {
-				st = util.NewStack()
-			}
-			c := arg.Val.Children[i]
-			if _, ok := fm[c]; !ok {
-				st.Push(c)
-			}
-		}
-
-		if _, ok := fm[arg.Val]; !ok {
-			nodePool.Put(arg.Val)
-			fm[arg.Val] = struct{}{}
-		}
-
-	}
-
-	for i := range op.Fields {
-		f := op.Fields[i]
-
-		for j := range f.Args {
-			arg := f.Args[j]
-			if arg.df {
-				continue
-			}
-
-			for k := range arg.Val.Children {
-				if st == nil {
-					st = util.NewStack()
-				}
-				c := arg.Val.Children[k]
-				if _, ok := fm[c]; !ok {
-					st.Push(c)
-				}
-			}
-
-			if _, ok := fm[arg.Val]; !ok {
-				nodePool.Put(arg.Val)
-				fm[arg.Val] = struct{}{}
-			}
-		}
-	}
-
-	if st == nil {
-		return
-	}
-
-	for {
-		if st.Len() == 0 {
-			break
-		}
-		intf := st.Pop()
-		node, ok := intf.(*Node)
-		if !ok || node == nil {
-			continue
-		}
-
-		for i := range node.Children {
-			st.Push(node.Children[i])
-		}
-
-		if _, ok := fm[node]; !ok {
-			nodePool.Put(node)
-			fm[node] = struct{}{}
-		}
-	}
-}
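Note: the freeNodes function shown as removed above walks every argument tree once and records each pointer in fm (a map[*Node]struct{}) before returning it to the pool, so a node shared between arguments is never Put twice. A compact sketch of that double-free guard, with simplified types of my own:

package main

import (
	"fmt"
	"sync"
)

type Node struct {
	Children []*Node
}

var nodePool = sync.Pool{New: func() interface{} { return new(Node) }}

// freeOnce pools every reachable node exactly once, even if the same
// node is reachable through more than one root (the seen map plays the
// role of fm in freeNodes above).
func freeOnce(roots []*Node) int {
	seen := make(map[*Node]struct{})
	stack := append([]*Node(nil), roots...)
	freed := 0
	for len(stack) > 0 {
		n := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if n == nil {
			continue
		}
		if _, ok := seen[n]; ok {
			continue
		}
		seen[n] = struct{}{}
		stack = append(stack, n.Children...)
		nodePool.Put(n)
		freed++
	}
	return freed
}

func main() {
	shared := &Node{}
	a := &Node{Children: []*Node{shared}}
	b := &Node{Children: []*Node{shared}}
	fmt.Println(freeOnce([]*Node{a, b})) // 3, not 4
}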
@@ -29,8 +29,6 @@ func al(b byte) bool {
 
 func (qt QType) String() string {
 	switch qt {
-	case QTUnknown:
-		return "unknown"
 	case QTQuery:
 		return "query"
 	case QTMutation:
@@ -64,7 +64,7 @@ func (sg *SuperGraph) initPrepared() error {
 		return fmt.Errorf("role query: %w", err)
 	}
 
-	sg.queries = make(map[uint64]*query)
+	sg.queries = make(map[uint64]query)
 
 	list, err := sg.allowList.Load()
 	if err != nil {

@@ -75,20 +75,22 @@ func (sg *SuperGraph) initPrepared() error {
 	h.SetSeed(sg.hashSeed)
 
 	for _, v := range list {
-		if v.Query == "" {
+		if len(v.Query) == 0 {
 			continue
 		}
 
 		qt := qcode.GetQType(v.Query)
 
 		switch qt {
 		case qcode.QTQuery:
-			sg.queries[queryID(&h, v.Name, "user")] = &query{ai: v, qt: qt}
-			sg.queries[queryID(&h, v.Name, "anon")] = &query{ai: v, qt: qt}
+			sg.queries[queryID(&h, v.Name, "user")] = query{ai: v, qt: qt}
+			if sg.anonExists {
+				sg.queries[queryID(&h, v.Name, "anon")] = query{ai: v, qt: qt}
+			}
 
 		case qcode.QTMutation:
 			for _, role := range sg.conf.Roles {
-				sg.queries[queryID(&h, v.Name, role.Name)] = &query{ai: v, qt: qt}
+				sg.queries[queryID(&h, v.Name, role.Name)] = query{ai: v, qt: qt}
 			}
 		}
 	}

@@ -113,7 +115,7 @@ func (sg *SuperGraph) prepareRoleStmt() error {
 
 	io.WriteString(w, `(SELECT (CASE`)
 	for _, role := range sg.conf.Roles {
-		if role.Match == "" {
+		if len(role.Match) == 0 {
 			continue
 		}
 		io.WriteString(w, ` WHEN `)

@@ -159,7 +161,7 @@ func (sg *SuperGraph) initAllowList() error {
 }
 
 // nolint: errcheck
-func queryID(h *maphash.Hash, name, role string) uint64 {
+func queryID(h *maphash.Hash, name string, role string) uint64 {
 	h.WriteString(name)
 	h.WriteString(role)
 	v := h.Sum64()

@@ -238,7 +238,7 @@ func (sg *SuperGraph) parentFieldIds(h *maphash.Hash, sel []qcode.Select, skippe
 	return fm, sm
 }
 
-func isSkipped(n, pos uint32) bool {
+func isSkipped(n uint32, pos uint32) bool {
 	return ((n & (1 << pos)) != 0)
 }
 
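Note: initPrepared keys its prepared-query map by a maphash of the operation name and role, computed with the engine's stored seed so that keys built at startup match the ones computed per request. A small sketch of that idea (keyFor is my name; the real helpers are queryID and mkkey above):

package main

import (
	"fmt"
	"hash/maphash"
)

type query struct{ name, role string }

// keyFor hashes a (name, role) pair; the same seed must be reused or
// the keys computed at startup will not match lookups at request time.
func keyFor(h *maphash.Hash, name, role string) uint64 {
	h.Reset()
	h.WriteString(name)
	h.WriteString(role)
	return h.Sum64()
}

func main() {
	seed := maphash.MakeSeed()

	var h maphash.Hash
	h.SetSeed(seed)

	queries := map[uint64]query{}
	queries[keyFor(&h, "getProducts", "user")] = query{"getProducts", "user"}

	// Later lookups use a hash with the same seed.
	var h2 maphash.Hash
	h2.SetSeed(seed)
	fmt.Println(queries[keyFor(&h2, "getProducts", "user")].role) // user
}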
@@ -46,7 +46,7 @@ func (sg *SuperGraph) initRemotes(t Table) error {
 
 	// if no table column specified in the config then
 	// use the primary key of the table as the id
-	if idcol == "" {
+	if len(idcol) == 0 {
 		pcol, err := sg.pc.IDColumn(t.Name)
 		if err != nil {
 			return err

@@ -3,7 +3,7 @@ package core
 import "hash/maphash"
 
 // nolint: errcheck
-func mkkey(h *maphash.Hash, k1, k2 string) uint64 {
+func mkkey(h *maphash.Hash, k1 string, k2 string) uint64 {
 	h.WriteString(k1)
 	h.WriteString(k2)
 	v := h.Sum64()
@@ -155,7 +155,7 @@ func cmdVersion(cmd *cobra.Command, args []string) {
 }
 
 func BuildDetails() string {
-	if version == "" {
+	if len(version) == 0 {
 		return `
 Super Graph (unknown version)
 For documentation, visit https://supergraph.dev

@@ -88,10 +88,6 @@ func cmdNew(cmd *cobra.Command, args []string) {
 		}
 	})
 
-	ifNotExists(path.Join(appConfigPath, "allow.list"), func(p string) error {
-		return ioutil.WriteFile(p, []byte{}, 0644)
-	})
-
 	// Create app migrations folder and add relevant files
 
 	appMigrationsPath := path.Join(appConfigPath, "migrations")

@@ -80,7 +80,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
 func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[string]string) map[string]interface{} {
 	ct := context.Background()
 
-	if v, ok := opt["user_id"]; ok && v != "" {
+	if v, ok := opt["user_id"]; ok && len(v) != 0 {
 		ct = context.WithValue(ct, core.UserIDKey, v)
 	}
 

@@ -144,7 +144,7 @@ func (c *csvSource) Values() ([]interface{}, error) {
 
 	for _, v := range c.rows[c.i] {
 		switch {
-		case v == "":
+		case len(v) == 0:
 			vals = append(vals, "")
 		case isDigit(v):
 			var n int

@@ -243,7 +243,7 @@ func avatarURL(size int) string {
 	return fmt.Sprintf("https://i.pravatar.cc/%d?%d", size, rand.Intn(5000))
 }
 
-func imageURL(width, height int) string {
+func imageURL(width int, height int) string {
 	return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
 }
 
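Note: the seed loader's csvSource.Values switch turns each CSV cell into a typed value: empty cells stay empty strings and digit-only cells become integers. A reduced, self-contained sketch of that conversion (cellValue is my name, not the repository's):

package main

import (
	"fmt"
	"strconv"
)

func isDigit(s string) bool {
	for _, r := range s {
		if r < '0' || r > '9' {
			return false
		}
	}
	return len(s) != 0
}

// cellValue mirrors the switch in csvSource.Values: empty stays "",
// digit-only strings become ints, everything else passes through.
func cellValue(v string) interface{} {
	switch {
	case len(v) == 0:
		return ""
	case isDigit(v):
		n, err := strconv.Atoi(v)
		if err != nil {
			return v
		}
		return n
	default:
		return v
	}
}

func main() {
	for _, v := range []string{"", "42", "hello"} {
		fmt.Printf("%T %v\n", cellValue(v), cellValue(v))
	}
}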
@@ -90,7 +90,7 @@ func newViper(configPath, configFile string) *viper.Viper {
 }
 
 func GetConfigName() string {
-	if os.Getenv("GO_ENV") == "" {
+	if len(os.Getenv("GO_ENV")) == 0 {
 		return "dev"
 	}
 

@@ -105,7 +105,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
 	}
 
 	if err == nil {
-		if conf.CacheControl != "" && res.Operation() == core.OpQuery {
+		if len(conf.CacheControl) != 0 && res.Operation() == core.OpQuery {
 			w.Header().Set("Cache-Control", conf.CacheControl)
 		}
 		//nolint: errcheck
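Note: apiV1 only sets a Cache-Control header when the request succeeded, a cache policy is configured, and the operation was a query, so mutations never pick up caching headers. A minimal sketch of that rule with simplified inputs (setCacheHeader and the cacheControl constant are assumptions for illustration):

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

const cacheControl = "public, max-age=30" // stands in for conf.CacheControl

// setCacheHeader applies the rule used in apiV1: only successful
// query (read) operations get a Cache-Control header.
func setCacheHeader(w http.ResponseWriter, isQuery bool, err error) {
	if err == nil && len(cacheControl) != 0 && isQuery {
		w.Header().Set("Cache-Control", cacheControl)
	}
}

func main() {
	rec := httptest.NewRecorder()
	setCacheHeader(rec, true, nil)
	fmt.Println(rec.Header().Get("Cache-Control")) // public, max-age=30

	rec = httptest.NewRecorder()
	setCacheHeader(rec, false, nil) // a mutation: header stays empty
	fmt.Println(rec.Header().Get("Cache-Control") == "")
}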
@@ -47,17 +47,17 @@ func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 		ctx := r.Context()
 
 		userIDProvider := r.Header.Get("X-User-ID-Provider")
-		if userIDProvider != "" {
+		if len(userIDProvider) != 0 {
 			ctx = context.WithValue(ctx, core.UserIDProviderKey, userIDProvider)
 		}
 
 		userID := r.Header.Get("X-User-ID")
-		if userID != "" {
+		if len(userID) != 0 {
 			ctx = context.WithValue(ctx, core.UserIDKey, userID)
 		}
 
 		userRole := r.Header.Get("X-User-Role")
-		if userRole != "" {
+		if len(userRole) != 0 {
 			ctx = context.WithValue(ctx, core.UserRoleKey, userRole)
 		}
 

@@ -68,11 +68,11 @@ func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	hdr := ac.Header
 
-	if hdr.Name == "" {
+	if len(hdr.Name) == 0 {
 		return nil, fmt.Errorf("auth '%s': no header.name defined", ac.Name)
 	}
 
-	if !hdr.Exists && hdr.Value == "" {
+	if !hdr.Exists && len(hdr.Value) == 0 {
 		return nil, fmt.Errorf("auth '%s': no header.value defined", ac.Name)
 	}
 

@@ -82,7 +82,7 @@ func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 
 		switch {
 		case hdr.Exists:
-			fo1 = (value == "")
+			fo1 = (len(value) == 0)
 
 		default:
 			fo1 = (value != hdr.Value)
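Note: SimpleHandler trusts the X-User-ID, X-User-ID-Provider and X-User-Role headers and copies them into request-context values that the core package reads later. The middleware sketch below uses local context keys since the real keys (core.UserIDKey and friends) live in that package:

package main

import (
	"context"
	"fmt"
	"net/http"
	"net/http/httptest"
)

type ctxKey int

const (
	userIDKey ctxKey = iota
	userRoleKey
)

// headerAuth is a sketch of the SimpleHandler pattern: trusted headers
// become context values, and requests without them pass through untouched.
func headerAuth(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
		if v := r.Header.Get("X-User-ID"); len(v) != 0 {
			ctx = context.WithValue(ctx, userIDKey, v)
		}
		if v := r.Header.Get("X-User-Role"); len(v) != 0 {
			ctx = context.WithValue(ctx, userRoleKey, v)
		}
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}

func main() {
	h := headerAuth(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, r.Context().Value(userIDKey), r.Context().Value(userRoleKey))
	}))
	req := httptest.NewRequest("GET", "/v1/graphql", nil)
	req.Header.Set("X-User-ID", "42")
	req.Header.Set("X-User-Role", "admin")
	rec := httptest.NewRecorder()
	h.ServeHTTP(rec, req)
	fmt.Print(rec.Body.String()) // 42 admin
}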
@@ -44,10 +44,10 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	publicKeyFile := ac.JWT.PubKeyFile
 
 	switch {
-	case secret != "":
+	case len(secret) != 0:
 		key = []byte(secret)
 
-	case publicKeyFile != "":
+	case len(publicKeyFile) != 0:
 		kd, err := ioutil.ReadFile(publicKeyFile)
 		if err != nil {
 			return nil, err

@@ -74,7 +74,7 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 
 		var tok string
 
-		if cookie != "" {
+		if len(cookie) != 0 {
 			ck, err := r.Cookie(cookie)
 			if err != nil {
 				next.ServeHTTP(w, r)

@@ -165,7 +165,7 @@ func railsAuth(ac *Auth) (*rails.Auth, error) {
 	}
 
 	version := ac.Rails.Version
-	if version == "" {
+	if len(version) == 0 {
 		return nil, errors.New("no auth.rails.version defined")
 	}
 
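Note: JwtHandler chooses its verification key from either a shared secret or a public-key file, and later pulls the token from a configured cookie before falling back to the Authorization header. The sketch below covers only the key-selection switch; parsing the file contents into an HMAC or RSA key is omitted:

package main

import (
	"errors"
	"fmt"
	"io/ioutil"
)

// pickJWTKey mirrors the switch in JwtHandler: a non-empty secret wins,
// otherwise a public-key file is read from disk, otherwise it is an error.
func pickJWTKey(secret, publicKeyFile string) ([]byte, error) {
	switch {
	case len(secret) != 0:
		return []byte(secret), nil
	case len(publicKeyFile) != 0:
		return ioutil.ReadFile(publicKeyFile)
	default:
		return nil, errors.New("no jwt secret or public key file configured")
	}
}

func main() {
	key, err := pickJWTKey("s3cr3t", "")
	fmt.Println(string(key), err) // s3cr3t <nil>
}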
@@ -199,7 +199,7 @@ func (m *Migrator) LoadMigrations(path string) error {
 	for _, v := range strings.Split(upSQL, "\n") {
 		// Only account for regular single line comment, empty line and space/comment combination
 		cleanString := strings.TrimSpace(v)
-		if cleanString != "" &&
+		if len(cleanString) != 0 &&
 			!strings.HasPrefix(cleanString, "--") {
 			containsSQL = true
 			break
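Note: LoadMigrations decides whether an "up" script really contains SQL by scanning its lines and ignoring blanks and single-line "--" comments, which keeps empty migrations from being executed. A standalone version of that check:

package main

import (
	"fmt"
	"strings"
)

// containsSQL reports whether any line of the script is something other
// than whitespace or a single-line "--" comment, as LoadMigrations does.
func containsSQL(script string) bool {
	for _, v := range strings.Split(script, "\n") {
		cleanString := strings.TrimSpace(v)
		if len(cleanString) != 0 && !strings.HasPrefix(cleanString, "--") {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(containsSQL("-- empty migration\n\n"))              // false
	fmt.Println(containsSQL("-- add users\nCREATE TABLE users();")) // true
}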
File diff suppressed because one or more lines are too long
@@ -27,7 +27,7 @@ func initWatcher() {
 	}
 
 	var d dir
-	if cpath == "" || cpath == "./" {
+	if len(cpath) == 0 || cpath == "./" {
 		d = Dir("./config", ReExec)
 	} else {
 		d = Dir(cpath, ReExec)

@@ -52,11 +52,11 @@ func startHTTP() {
 	hp := strings.SplitN(conf.HostPort, ":", 2)
 
 	if len(hp) == 2 {
-		if conf.Host != "" {
+		if len(conf.Host) != 0 {
 			hp[0] = conf.Host
 		}
 
-		if conf.Port != "" {
+		if len(conf.Port) != 0 {
 			hp[1] = conf.Port
 		}
 

@@ -64,7 +64,7 @@ func startHTTP() {
 		}
 	}
 
-	if conf.hostPort == "" {
+	if len(conf.hostPort) == 0 {
 		conf.hostPort = defaultHP
 	}
 

@@ -123,7 +123,7 @@ func routeHandler() (http.Handler, error) {
 		return mux, nil
 	}
 
-	if conf.APIPath != "" {
+	if len(conf.APIPath) != 0 {
 		apiRoute = path.Join("/", conf.APIPath, "/v1/graphql")
 	}
 
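Note: startHTTP splits the configured host_port on ":" and lets separate host and port settings override each half, then falls back to a built-in default when nothing is configured. The sketch below condenses that resolution into one function; defaultHP and the final host+port branch are assumptions of mine:

package main

import (
	"fmt"
	"strings"
)

const defaultHP = "0.0.0.0:8080" // stand-in for the server default

// listenAddr follows the startHTTP logic: host_port is the base,
// host and port override its halves, and defaultHP is the fallback.
func listenAddr(hostPort, host, port string) string {
	if len(hostPort) != 0 {
		hp := strings.SplitN(hostPort, ":", 2)
		if len(hp) == 2 {
			if len(host) != 0 {
				hp[0] = host
			}
			if len(port) != 0 {
				hp[1] = port
			}
			return hp[0] + ":" + hp[1]
		}
	}
	if len(host) != 0 && len(port) != 0 {
		return host + ":" + port
	}
	return defaultHP
}

func main() {
	fmt.Println(listenAddr("127.0.0.1:8080", "", "3000")) // 127.0.0.1:3000
	fmt.Println(listenAddr("", "", ""))                   // 0.0.0.0:8080
}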
@@ -134,7 +134,7 @@ database:
   type: postgres
   host: db
   port: 5432
-  dbname: {{ .AppNameSlug -}}_development
+  dbname: {{- .AppNameSlug -}}_development
   user: postgres
   password: postgres
 

@@ -82,7 +82,7 @@ database:
   type: postgres
   host: db
   port: 5432
-  dbname: {{ .AppNameSlug -}}_production
+  dbname: {{- .AppNameSlug -}}_production
   user: postgres
   password: postgres
   #pool_size: 10
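Note: the only change in the generated config templates is the leading trim marker on the dbname line. In Go's text/template, "{{-" also removes the whitespace before the action, so the v0.14.12 form renders "dbname:myapp_development" with no space after the colon, while the master form keeps the space. A short demonstration:

package main

import (
	"os"
	"text/template"
)

// The two template forms from the hunks above differ only in the
// leading trim marker; "{{-" also eats the space after "dbname:".
func main() {
	data := map[string]string{"AppNameSlug": "myapp"}

	t1 := template.Must(template.New("a").Parse("dbname: {{ .AppNameSlug -}}_development\n"))
	t2 := template.Must(template.New("b").Parse("dbname: {{- .AppNameSlug -}}_development\n"))

	_ = t1.Execute(os.Stdout, data) // dbname: myapp_development
	_ = t2.Execute(os.Stdout, data) // dbname:myapp_development
}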