Compare commits
7 Commits
Author | SHA1 | Date | |
---|---|---|---|
1fb7f0e6c8 | |||
2241364d00 | |||
f63e270c73 | |||
ccab367351 | |||
67ddc148a9 | |||
31afdac3af | |||
1344246287 |
1
.gitignore
vendored
1
.gitignore
vendored
@ -38,4 +38,5 @@ release
|
|||||||
.gofuzz
|
.gofuzz
|
||||||
*-fuzz.zip
|
*-fuzz.zip
|
||||||
*.test
|
*.test
|
||||||
|
.firebase
|
||||||
|
|
||||||
|
@ -29,9 +29,9 @@ COPY --from=react-build /web/build/ ./internal/serv/web/build
|
|||||||
|
|
||||||
RUN go mod vendor
|
RUN go mod vendor
|
||||||
RUN make build
|
RUN make build
|
||||||
RUN echo "Compressing binary, will take a bit of time..." && \
|
# RUN echo "Compressing binary, will take a bit of time..." && \
|
||||||
upx --ultra-brute -qq super-graph && \
|
# upx --ultra-brute -qq super-graph && \
|
||||||
upx -t super-graph
|
# upx -t super-graph
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@ -52,7 +52,9 @@ func main() {
|
|||||||
}
|
}
|
||||||
}`
|
}`
|
||||||
|
|
||||||
res, err := sg.GraphQL(context.Background(), query, nil)
|
ctx = context.WithValue(ctx, core.UserIDKey, 1)
|
||||||
|
|
||||||
|
res, err := sg.GraphQL(ctx, query, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
@ -90,7 +92,7 @@ This compiler is what sits at the heart of Super Graph, with layers of useful fu
|
|||||||
- Fuzz tested for security
|
- Fuzz tested for security
|
||||||
- Database migrations tool
|
- Database migrations tool
|
||||||
- Database seeding tool
|
- Database seeding tool
|
||||||
- Works with Postgres and YugabyteDB
|
- Works with Postgres and Yugabyte DB
|
||||||
- OpenCensus Support: Zipkin, Prometheus, X-Ray, Stackdriver
|
- OpenCensus Support: Zipkin, Prometheus, X-Ray, Stackdriver
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
@ -77,6 +77,8 @@ cors_debug: true
|
|||||||
# exporter: "zipkin"
|
# exporter: "zipkin"
|
||||||
# endpoint: "http://zipkin:9411/api/v2/spans"
|
# endpoint: "http://zipkin:9411/api/v2/spans"
|
||||||
# sample: 0.2
|
# sample: 0.2
|
||||||
|
# include_query: false
|
||||||
|
# include_params: false
|
||||||
|
|
||||||
auth:
|
auth:
|
||||||
# Can be 'rails' or 'jwt'
|
# Can be 'rails' or 'jwt'
|
||||||
|
@ -32,7 +32,9 @@
|
|||||||
}
|
}
|
||||||
}`
|
}`
|
||||||
|
|
||||||
res, err := sg.GraphQL(context.Background(), query, nil)
|
ctx = context.WithValue(ctx, core.UserIDKey, 1)
|
||||||
|
|
||||||
|
res, err := sg.GraphQL(ctx, query, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
|
129
core/args.go
129
core/args.go
@ -1,72 +1,18 @@
|
|||||||
package core
|
package core
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
|
||||||
|
|
||||||
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
"github.com/dosco/super-graph/jsn"
|
"github.com/dosco/super-graph/jsn"
|
||||||
)
|
)
|
||||||
|
|
||||||
// argMap function is used to string replace variables with values by
|
|
||||||
// the fasttemplate code
|
|
||||||
func (c *scontext) argMap() func(w io.Writer, tag string) (int, error) {
|
|
||||||
return func(w io.Writer, tag string) (int, error) {
|
|
||||||
switch tag {
|
|
||||||
case "user_id_provider":
|
|
||||||
if v := c.Value(UserIDProviderKey); v != nil {
|
|
||||||
return io.WriteString(w, v.(string))
|
|
||||||
}
|
|
||||||
return 0, argErr("user_id_provider")
|
|
||||||
|
|
||||||
case "user_id":
|
|
||||||
if v := c.Value(UserIDKey); v != nil {
|
|
||||||
return io.WriteString(w, v.(string))
|
|
||||||
}
|
|
||||||
return 0, argErr("user_id")
|
|
||||||
|
|
||||||
case "user_role":
|
|
||||||
if v := c.Value(UserRoleKey); v != nil {
|
|
||||||
return io.WriteString(w, v.(string))
|
|
||||||
}
|
|
||||||
return 0, argErr("user_role")
|
|
||||||
}
|
|
||||||
|
|
||||||
fields := jsn.Get(c.vars, [][]byte{[]byte(tag)})
|
|
||||||
|
|
||||||
if len(fields) == 0 {
|
|
||||||
return 0, argErr(tag)
|
|
||||||
|
|
||||||
}
|
|
||||||
v := fields[0].Value
|
|
||||||
|
|
||||||
// Open and close quotes
|
|
||||||
if len(v) >= 2 && v[0] == '"' && v[len(v)-1] == '"' {
|
|
||||||
fields[0].Value = v[1 : len(v)-1]
|
|
||||||
}
|
|
||||||
|
|
||||||
if tag == "cursor" {
|
|
||||||
if bytes.EqualFold(v, []byte("null")) {
|
|
||||||
return io.WriteString(w, ``)
|
|
||||||
}
|
|
||||||
v1, err := c.sg.decrypt(string(fields[0].Value))
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return w.Write(v1)
|
|
||||||
}
|
|
||||||
|
|
||||||
return w.Write(escSQuote(fields[0].Value))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// argList function is used to create a list of arguments to pass
|
// argList function is used to create a list of arguments to pass
|
||||||
// to a prepared statement. FYI no escaping of single quotes is
|
// to a prepared statement.
|
||||||
// needed here
|
|
||||||
func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
|
func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
|
||||||
vars := make([]interface{}, len(args))
|
vars := make([]interface{}, len(md.Params))
|
||||||
|
|
||||||
var fields map[string]json.RawMessage
|
var fields map[string]json.RawMessage
|
||||||
var err error
|
var err error
|
||||||
@ -79,31 +25,30 @@ func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for i := range args {
|
for i, p := range md.Params {
|
||||||
av := args[i]
|
switch p.Name {
|
||||||
switch {
|
case "user_id":
|
||||||
case bytes.Equal(av, []byte("user_id")):
|
|
||||||
if v := c.Value(UserIDKey); v != nil {
|
if v := c.Value(UserIDKey); v != nil {
|
||||||
vars[i] = v.(string)
|
vars[i] = v.(string)
|
||||||
} else {
|
} else {
|
||||||
return nil, argErr("user_id")
|
return nil, argErr(p)
|
||||||
}
|
}
|
||||||
|
|
||||||
case bytes.Equal(av, []byte("user_id_provider")):
|
case "user_id_provider":
|
||||||
if v := c.Value(UserIDProviderKey); v != nil {
|
if v := c.Value(UserIDProviderKey); v != nil {
|
||||||
vars[i] = v.(string)
|
vars[i] = v.(string)
|
||||||
} else {
|
} else {
|
||||||
return nil, argErr("user_id_provider")
|
return nil, argErr(p)
|
||||||
}
|
}
|
||||||
|
|
||||||
case bytes.Equal(av, []byte("user_role")):
|
case "user_role":
|
||||||
if v := c.Value(UserRoleKey); v != nil {
|
if v := c.Value(UserRoleKey); v != nil {
|
||||||
vars[i] = v.(string)
|
vars[i] = v.(string)
|
||||||
} else {
|
} else {
|
||||||
return nil, argErr("user_role")
|
return nil, argErr(p)
|
||||||
}
|
}
|
||||||
|
|
||||||
case bytes.Equal(av, []byte("cursor")):
|
case "cursor":
|
||||||
if v, ok := fields["cursor"]; ok && v[0] == '"' {
|
if v, ok := fields["cursor"]; ok && v[0] == '"' {
|
||||||
v1, err := c.sg.decrypt(string(v[1 : len(v)-1]))
|
v1, err := c.sg.decrypt(string(v[1 : len(v)-1]))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -111,25 +56,33 @@ func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
|
|||||||
}
|
}
|
||||||
vars[i] = v1
|
vars[i] = v1
|
||||||
} else {
|
} else {
|
||||||
return nil, argErr("cursor")
|
return nil, argErr(p)
|
||||||
}
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
if v, ok := fields[string(av)]; ok {
|
if v, ok := fields[p.Name]; ok {
|
||||||
|
switch {
|
||||||
|
case p.IsArray && v[0] != '[':
|
||||||
|
return nil, fmt.Errorf("variable '%s' should be an array of type '%s'", p.Name, p.Type)
|
||||||
|
|
||||||
|
case p.Type == "json" && v[0] != '[' && v[0] != '{':
|
||||||
|
return nil, fmt.Errorf("variable '%s' should be an array or object", p.Name)
|
||||||
|
}
|
||||||
|
|
||||||
switch v[0] {
|
switch v[0] {
|
||||||
case '[', '{':
|
case '[', '{':
|
||||||
vars[i] = v
|
vars[i] = v
|
||||||
|
|
||||||
default:
|
default:
|
||||||
var val interface{}
|
var val interface{}
|
||||||
if err := json.Unmarshal(v, &val); err != nil {
|
if err := json.Unmarshal(v, &val); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
vars[i] = val
|
vars[i] = val
|
||||||
}
|
}
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
return nil, argErr(string(av))
|
return nil, argErr(p)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -137,32 +90,6 @@ func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
|
|||||||
return vars, nil
|
return vars, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
//
|
func argErr(p psql.Param) error {
|
||||||
func escSQuote(b []byte) []byte {
|
return fmt.Errorf("required variable '%s' of type '%s' must be set", p.Name, p.Type)
|
||||||
var buf *bytes.Buffer
|
|
||||||
s := 0
|
|
||||||
for i := range b {
|
|
||||||
if b[i] == '\'' {
|
|
||||||
if buf == nil {
|
|
||||||
buf = &bytes.Buffer{}
|
|
||||||
}
|
|
||||||
buf.Write(b[s:i])
|
|
||||||
buf.WriteString(`''`)
|
|
||||||
s = i + 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if buf == nil {
|
|
||||||
return b
|
|
||||||
}
|
|
||||||
|
|
||||||
l := len(b)
|
|
||||||
if s < (l - 1) {
|
|
||||||
buf.Write(b[s:l])
|
|
||||||
}
|
|
||||||
return buf.Bytes()
|
|
||||||
}
|
|
||||||
|
|
||||||
func argErr(name string) error {
|
|
||||||
return fmt.Errorf("query requires variable '%s' to be set", name)
|
|
||||||
}
|
}
|
||||||
|
@ -1,13 +0,0 @@
|
|||||||
package core
|
|
||||||
|
|
||||||
import "testing"
|
|
||||||
|
|
||||||
func TestEscQuote(t *testing.T) {
|
|
||||||
val := "That's the worst, don''t be calling me's again"
|
|
||||||
exp := "That''s the worst, don''''t be calling me''s again"
|
|
||||||
ret := escSQuote([]byte(val))
|
|
||||||
|
|
||||||
if exp != string(ret) {
|
|
||||||
t.Errorf("escSQuote failed: %s", string(ret))
|
|
||||||
}
|
|
||||||
}
|
|
@ -12,10 +12,10 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type stmt struct {
|
type stmt struct {
|
||||||
role *Role
|
role *Role
|
||||||
qc *qcode.QCode
|
qc *qcode.QCode
|
||||||
skipped uint32
|
md psql.Metadata
|
||||||
sql string
|
sql string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (sg *SuperGraph) buildStmt(qt qcode.QType, query, vars []byte, role string) ([]stmt, error) {
|
func (sg *SuperGraph) buildStmt(qt qcode.QType, query, vars []byte, role string) ([]stmt, error) {
|
||||||
@ -62,12 +62,11 @@ func (sg *SuperGraph) buildRoleStmt(query, vars []byte, role string) ([]stmt, er
|
|||||||
stmts := []stmt{stmt{role: ro, qc: qc}}
|
stmts := []stmt{stmt{role: ro, qc: qc}}
|
||||||
w := &bytes.Buffer{}
|
w := &bytes.Buffer{}
|
||||||
|
|
||||||
skipped, err := sg.pc.Compile(qc, w, psql.Variables(vm))
|
stmts[0].md, err = sg.pc.Compile(w, qc, psql.Variables(vm))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
stmts[0].skipped = skipped
|
|
||||||
stmts[0].sql = w.String()
|
stmts[0].sql = w.String()
|
||||||
|
|
||||||
return stmts, nil
|
return stmts, nil
|
||||||
@ -104,14 +103,13 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
stmts = append(stmts, stmt{role: role, qc: qc})
|
stmts = append(stmts, stmt{role: role, qc: qc})
|
||||||
|
s := &stmts[len(stmts)-1]
|
||||||
|
|
||||||
skipped, err := sg.pc.Compile(qc, w, psql.Variables(vm))
|
s.md, err = sg.pc.Compile(w, qc, psql.Variables(vm))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
s := &stmts[len(stmts)-1]
|
|
||||||
s.skipped = skipped
|
|
||||||
s.sql = w.String()
|
s.sql = w.String()
|
||||||
w.Reset()
|
w.Reset()
|
||||||
}
|
}
|
||||||
|
@ -30,12 +30,10 @@ type Config struct {
|
|||||||
// or other database functions
|
// or other database functions
|
||||||
SetUserID bool `mapstructure:"set_user_id"`
|
SetUserID bool `mapstructure:"set_user_id"`
|
||||||
|
|
||||||
// DefaultAllow reverses the blocked by default behaviour for queries in
|
// DefaultBlock ensures that in anonymous mode (role 'anon') all tables
|
||||||
// anonymous mode. (anon role)
|
// are blocked from queries and mutations. To open access to tables in
|
||||||
// For example if the table `users` is not listed under the anon role then
|
// anonymous mode they have to be added to the 'anon' role config.
|
||||||
// access to it would by default for unauthenticated queries this reverses
|
DefaultBlock bool `mapstructure:"default_block"`
|
||||||
// this behavior (!!! Use with caution !!!!)
|
|
||||||
DefaultAllow bool `mapstructure:"default_allow"`
|
|
||||||
|
|
||||||
// Vars is a map of hardcoded variables that can be leveraged in your
|
// Vars is a map of hardcoded variables that can be leveraged in your
|
||||||
// queries (eg variable admin_id will be $admin_id in the query)
|
// queries (eg variable admin_id will be $admin_id in the query)
|
||||||
@ -57,6 +55,9 @@ type Config struct {
|
|||||||
// Roles contains all the configuration for all the roles you want to support
|
// Roles contains all the configuration for all the roles you want to support
|
||||||
// `user` and `anon` are two default roles. User role is for when a user ID is
|
// `user` and `anon` are two default roles. User role is for when a user ID is
|
||||||
// available and Anon when it's not.
|
// available and Anon when it's not.
|
||||||
|
//
|
||||||
|
// If you're using the RolesQuery config to enable atribute based acess control then
|
||||||
|
// you can add more custom roles.
|
||||||
Roles []Role
|
Roles []Role
|
||||||
|
|
||||||
// Inflections is to add additionally singular to plural mappings
|
// Inflections is to add additionally singular to plural mappings
|
||||||
@ -108,12 +109,12 @@ type Role struct {
|
|||||||
// RoleTable struct contains role specific access control values for a database table
|
// RoleTable struct contains role specific access control values for a database table
|
||||||
type RoleTable struct {
|
type RoleTable struct {
|
||||||
Name string
|
Name string
|
||||||
ReadOnly *bool `mapstructure:"read_only"`
|
ReadOnly bool `mapstructure:"read_only"`
|
||||||
|
|
||||||
Query Query
|
Query *Query
|
||||||
Insert Insert
|
Insert *Insert
|
||||||
Update Update
|
Update *Update
|
||||||
Delete Delete
|
Delete *Delete
|
||||||
}
|
}
|
||||||
|
|
||||||
// Query struct contains access control values for query operations
|
// Query struct contains access control values for query operations
|
||||||
@ -122,7 +123,7 @@ type Query struct {
|
|||||||
Filters []string
|
Filters []string
|
||||||
Columns []string
|
Columns []string
|
||||||
DisableFunctions bool `mapstructure:"disable_functions"`
|
DisableFunctions bool `mapstructure:"disable_functions"`
|
||||||
Block *bool
|
Block bool
|
||||||
}
|
}
|
||||||
|
|
||||||
// Insert struct contains access control values for insert operations
|
// Insert struct contains access control values for insert operations
|
||||||
@ -130,7 +131,7 @@ type Insert struct {
|
|||||||
Filters []string
|
Filters []string
|
||||||
Columns []string
|
Columns []string
|
||||||
Presets map[string]string
|
Presets map[string]string
|
||||||
Block *bool
|
Block bool
|
||||||
}
|
}
|
||||||
|
|
||||||
// Insert struct contains access control values for update operations
|
// Insert struct contains access control values for update operations
|
||||||
@ -138,14 +139,59 @@ type Update struct {
|
|||||||
Filters []string
|
Filters []string
|
||||||
Columns []string
|
Columns []string
|
||||||
Presets map[string]string
|
Presets map[string]string
|
||||||
Block *bool
|
Block bool
|
||||||
}
|
}
|
||||||
|
|
||||||
// Delete struct contains access control values for delete operations
|
// Delete struct contains access control values for delete operations
|
||||||
type Delete struct {
|
type Delete struct {
|
||||||
Filters []string
|
Filters []string
|
||||||
Columns []string
|
Columns []string
|
||||||
Block *bool
|
Block bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddRoleTable function is a helper function to make it easy to add per-table
|
||||||
|
// row-level config
|
||||||
|
func (c *Config) AddRoleTable(role string, table string, conf interface{}) error {
|
||||||
|
var r *Role
|
||||||
|
|
||||||
|
for i := range c.Roles {
|
||||||
|
if strings.EqualFold(c.Roles[i].Name, role) {
|
||||||
|
r = &c.Roles[i]
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if r == nil {
|
||||||
|
nr := Role{Name: role}
|
||||||
|
c.Roles = append(c.Roles, nr)
|
||||||
|
r = &nr
|
||||||
|
}
|
||||||
|
|
||||||
|
var t *RoleTable
|
||||||
|
for i := range r.Tables {
|
||||||
|
if strings.EqualFold(r.Tables[i].Name, table) {
|
||||||
|
t = &r.Tables[i]
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if t == nil {
|
||||||
|
nt := RoleTable{Name: table}
|
||||||
|
r.Tables = append(r.Tables, nt)
|
||||||
|
t = &nt
|
||||||
|
}
|
||||||
|
|
||||||
|
switch v := conf.(type) {
|
||||||
|
case Query:
|
||||||
|
t.Query = &v
|
||||||
|
case Insert:
|
||||||
|
t.Insert = &v
|
||||||
|
case Update:
|
||||||
|
t.Update = &v
|
||||||
|
case Delete:
|
||||||
|
t.Delete = &v
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("unsupported object type: %t", v)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
|
// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
|
||||||
|
@ -5,11 +5,6 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
|
||||||
openVar = "{{"
|
|
||||||
closeVar = "}}"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
var (
|
||||||
errNotFound = errors.New("not found in prepared statements")
|
errNotFound = errors.New("not found in prepared statements")
|
||||||
)
|
)
|
||||||
|
34
core/core.go
34
core/core.go
@ -1,7 +1,6 @@
|
|||||||
package core
|
package core
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"context"
|
"context"
|
||||||
"database/sql"
|
"database/sql"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
@ -10,8 +9,6 @@ import (
|
|||||||
|
|
||||||
"github.com/dosco/super-graph/core/internal/psql"
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
"github.com/dosco/super-graph/core/internal/qcode"
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
|
|
||||||
"github.com/valyala/fasttemplate"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type OpType int
|
type OpType int
|
||||||
@ -93,7 +90,8 @@ func (sg *SuperGraph) initCompilers() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
sg.qc, err = qcode.NewCompiler(qcode.Config{
|
sg.qc, err = qcode.NewCompiler(qcode.Config{
|
||||||
Blocklist: sg.conf.Blocklist,
|
DefaultBlock: sg.conf.DefaultBlock,
|
||||||
|
Blocklist: sg.conf.Blocklist,
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@ -126,7 +124,7 @@ func (c *scontext) execQuery() ([]byte, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(data) == 0 || st.skipped == 0 {
|
if len(data) == 0 || st.md.Skipped == 0 {
|
||||||
return data, nil
|
return data, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -181,7 +179,7 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
|
|||||||
var root []byte
|
var root []byte
|
||||||
var row *sql.Row
|
var row *sql.Row
|
||||||
|
|
||||||
varsList, err := c.argList(ps.args)
|
varsList, err := c.argList(ps.st.md)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
@ -252,15 +250,23 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
|
|||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
st := &stmts[0]
|
st := &stmts[0]
|
||||||
|
c.res.sql = st.sql
|
||||||
|
|
||||||
t := fasttemplate.New(st.sql, openVar, closeVar)
|
varList, err := c.argList(st.md)
|
||||||
buf := &bytes.Buffer{}
|
|
||||||
|
|
||||||
_, err = t.ExecuteFunc(buf, c.argMap())
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
finalSQL := buf.String()
|
// finalSQL := buf.String()
|
||||||
|
|
||||||
|
////
|
||||||
|
|
||||||
|
// _, err = t.ExecuteFunc(buf, c.argMap(st.md))
|
||||||
|
// if err != nil {
|
||||||
|
// return nil, nil, err
|
||||||
|
// }
|
||||||
|
// finalSQL := buf.String()
|
||||||
|
|
||||||
|
/////
|
||||||
|
|
||||||
// var stime time.Time
|
// var stime time.Time
|
||||||
|
|
||||||
@ -275,9 +281,9 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
|
|||||||
// defaultRole := c.role
|
// defaultRole := c.role
|
||||||
|
|
||||||
if useTx {
|
if useTx {
|
||||||
row = tx.QueryRow(finalSQL)
|
row = tx.QueryRowContext(c, st.sql, varList...)
|
||||||
} else {
|
} else {
|
||||||
row = c.sg.db.QueryRow(finalSQL)
|
row = c.sg.db.QueryRowContext(c, st.sql, varList...)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(stmts) > 1 {
|
if len(stmts) > 1 {
|
||||||
@ -286,8 +292,6 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
|
|||||||
err = row.Scan(&root)
|
err = row.Scan(&root)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.res.sql = finalSQL
|
|
||||||
|
|
||||||
if len(role) == 0 {
|
if len(role) == 0 {
|
||||||
c.res.role = c.role
|
c.res.role = c.role
|
||||||
} else {
|
} else {
|
||||||
|
121
core/init.go
121
core/init.go
@ -2,9 +2,7 @@ package core
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"regexp"
|
|
||||||
"strings"
|
"strings"
|
||||||
"unicode"
|
|
||||||
|
|
||||||
"github.com/dosco/super-graph/core/internal/psql"
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
"github.com/dosco/super-graph/core/internal/qcode"
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
@ -18,11 +16,6 @@ func (sg *SuperGraph) initConfig() error {
|
|||||||
flect.AddPlural(k, v)
|
flect.AddPlural(k, v)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Variables: Validate and sanitize
|
|
||||||
for k, v := range c.Vars {
|
|
||||||
c.Vars[k] = sanitizeVars(v)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Tables: Validate and sanitize
|
// Tables: Validate and sanitize
|
||||||
tm := make(map[string]struct{})
|
tm := make(map[string]struct{})
|
||||||
|
|
||||||
@ -80,9 +73,6 @@ func (sg *SuperGraph) initConfig() error {
|
|||||||
sg.roles["anon"] = &ur
|
sg.roles["anon"] = &ur
|
||||||
}
|
}
|
||||||
|
|
||||||
// Roles: validate and sanitize
|
|
||||||
c.RolesQuery = sanitizeVars(c.RolesQuery)
|
|
||||||
|
|
||||||
if c.RolesQuery == "" {
|
if c.RolesQuery == "" {
|
||||||
sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
|
sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
|
||||||
}
|
}
|
||||||
@ -206,7 +196,7 @@ func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
|
|||||||
func addRoles(c *Config, qc *qcode.Compiler) error {
|
func addRoles(c *Config, qc *qcode.Compiler) error {
|
||||||
for _, r := range c.Roles {
|
for _, r := range c.Roles {
|
||||||
for _, t := range r.Tables {
|
for _, t := range r.Tables {
|
||||||
if err := addRole(qc, r, t, c.DefaultAllow); err != nil {
|
if err := addRole(qc, r, t, c.DefaultBlock); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -215,67 +205,56 @@ func addRoles(c *Config, qc *qcode.Compiler) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func addRole(qc *qcode.Compiler, r Role, t RoleTable, defaultAllow bool) error {
|
func addRole(qc *qcode.Compiler, r Role, t RoleTable, defaultBlock bool) error {
|
||||||
ro := true // read-only
|
ro := false // read-only
|
||||||
|
|
||||||
if defaultAllow {
|
if defaultBlock && r.Name == "anon" {
|
||||||
ro = false
|
ro = true
|
||||||
}
|
}
|
||||||
|
|
||||||
if r.Name != "anon" {
|
if t.ReadOnly {
|
||||||
ro = false
|
ro = true
|
||||||
}
|
}
|
||||||
|
|
||||||
if t.ReadOnly != nil {
|
query := qcode.QueryConfig{Block: false}
|
||||||
ro = *t.ReadOnly
|
insert := qcode.InsertConfig{Block: ro}
|
||||||
|
update := qcode.UpdateConfig{Block: ro}
|
||||||
|
del := qcode.DeleteConfig{Block: ro}
|
||||||
|
|
||||||
|
if t.Query != nil {
|
||||||
|
query = qcode.QueryConfig{
|
||||||
|
Limit: t.Query.Limit,
|
||||||
|
Filters: t.Query.Filters,
|
||||||
|
Columns: t.Query.Columns,
|
||||||
|
DisableFunctions: t.Query.DisableFunctions,
|
||||||
|
Block: t.Query.Block,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
blocked := struct {
|
if t.Insert != nil {
|
||||||
query bool
|
insert = qcode.InsertConfig{
|
||||||
insert bool
|
Filters: t.Insert.Filters,
|
||||||
update bool
|
Columns: t.Insert.Columns,
|
||||||
delete bool
|
Presets: t.Insert.Presets,
|
||||||
}{false, ro, ro, ro}
|
Block: t.Insert.Block,
|
||||||
|
}
|
||||||
if t.Query.Block != nil {
|
|
||||||
blocked.query = *t.Query.Block
|
|
||||||
}
|
|
||||||
if t.Insert.Block != nil {
|
|
||||||
blocked.insert = *t.Insert.Block
|
|
||||||
}
|
|
||||||
if t.Update.Block != nil {
|
|
||||||
blocked.update = *t.Update.Block
|
|
||||||
}
|
|
||||||
if t.Delete.Block != nil {
|
|
||||||
blocked.delete = *t.Delete.Block
|
|
||||||
}
|
}
|
||||||
|
|
||||||
query := qcode.QueryConfig{
|
if t.Update != nil {
|
||||||
Limit: t.Query.Limit,
|
update = qcode.UpdateConfig{
|
||||||
Filters: t.Query.Filters,
|
Filters: t.Update.Filters,
|
||||||
Columns: t.Query.Columns,
|
Columns: t.Update.Columns,
|
||||||
DisableFunctions: t.Query.DisableFunctions,
|
Presets: t.Update.Presets,
|
||||||
Block: blocked.query,
|
Block: t.Update.Block,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
insert := qcode.InsertConfig{
|
if t.Delete != nil {
|
||||||
Filters: t.Insert.Filters,
|
del = qcode.DeleteConfig{
|
||||||
Columns: t.Insert.Columns,
|
Filters: t.Delete.Filters,
|
||||||
Presets: t.Insert.Presets,
|
Columns: t.Delete.Columns,
|
||||||
Block: blocked.insert,
|
Block: t.Delete.Block,
|
||||||
}
|
}
|
||||||
|
|
||||||
update := qcode.UpdateConfig{
|
|
||||||
Filters: t.Update.Filters,
|
|
||||||
Columns: t.Update.Columns,
|
|
||||||
Presets: t.Update.Presets,
|
|
||||||
Block: blocked.update,
|
|
||||||
}
|
|
||||||
|
|
||||||
del := qcode.DeleteConfig{
|
|
||||||
Filters: t.Delete.Filters,
|
|
||||||
Columns: t.Delete.Columns,
|
|
||||||
Block: blocked.delete,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return qc.AddRole(r.Name, t.Name, qcode.TRConfig{
|
return qc.AddRole(r.Name, t.Name, qcode.TRConfig{
|
||||||
@ -293,23 +272,3 @@ func (r *Role) GetTable(name string) *RoleTable {
|
|||||||
func sanitize(value string) string {
|
func sanitize(value string) string {
|
||||||
return strings.ToLower(strings.TrimSpace(value))
|
return strings.ToLower(strings.TrimSpace(value))
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
|
||||||
varRe1 = regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
|
|
||||||
varRe2 = regexp.MustCompile(`\{\{([a-zA-Z0-9_.]+)\}\}`)
|
|
||||||
)
|
|
||||||
|
|
||||||
func sanitizeVars(s string) string {
|
|
||||||
s0 := varRe1.ReplaceAllString(s, `{{$1}}`)
|
|
||||||
|
|
||||||
s1 := strings.Map(func(r rune) rune {
|
|
||||||
if unicode.IsSpace(r) {
|
|
||||||
return ' '
|
|
||||||
}
|
|
||||||
return r
|
|
||||||
}, s0)
|
|
||||||
|
|
||||||
return varRe2.ReplaceAllStringFunc(s1, func(m string) string {
|
|
||||||
return strings.ToLower(m)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
@ -239,8 +239,6 @@ func (al *List) save(item Item) error {
|
|||||||
qd := &schema.QueryDocument{}
|
qd := &schema.QueryDocument{}
|
||||||
|
|
||||||
if err := qd.Parse(item.Query); err != nil {
|
if err := qd.Parse(item.Query); err != nil {
|
||||||
fmt.Println("##", item.Query)
|
|
||||||
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -248,8 +246,6 @@ func (al *List) save(item Item) error {
|
|||||||
query := buf.String()
|
query := buf.String()
|
||||||
buf.Reset()
|
buf.Reset()
|
||||||
|
|
||||||
// fmt.Println(">", query)
|
|
||||||
|
|
||||||
item.Name = QueryName(query)
|
item.Name = QueryName(query)
|
||||||
item.key = strings.ToLower(item.Name)
|
item.key = strings.ToLower(item.Name)
|
||||||
|
|
||||||
|
@ -55,19 +55,6 @@ func TestSuperGraph(t *testing.T, db *sql.DB, before func(t *testing.T)) {
|
|||||||
config.AllowListFile = "./allow.list"
|
config.AllowListFile = "./allow.list"
|
||||||
config.RolesQuery = `SELECT * FROM users WHERE id = $user_id`
|
config.RolesQuery = `SELECT * FROM users WHERE id = $user_id`
|
||||||
|
|
||||||
blockFalse := false
|
|
||||||
|
|
||||||
config.Roles = []core.Role{
|
|
||||||
core.Role{
|
|
||||||
Name: "anon",
|
|
||||||
Tables: []core.RoleTable{
|
|
||||||
core.RoleTable{Name: "users", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
|
|
||||||
core.RoleTable{Name: "product", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
|
|
||||||
core.RoleTable{Name: "line_item", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
sg, err := core.NewSuperGraph(&config, db)
|
sg, err := core.NewSuperGraph(&config, db)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
|
@ -12,8 +12,7 @@ import (
|
|||||||
func (c *compilerContext) renderBaseColumns(
|
func (c *compilerContext) renderBaseColumns(
|
||||||
sel *qcode.Select,
|
sel *qcode.Select,
|
||||||
ti *DBTableInfo,
|
ti *DBTableInfo,
|
||||||
childCols []*qcode.Column,
|
childCols []*qcode.Column) ([]int, bool, error) {
|
||||||
skipped uint32) ([]int, bool, error) {
|
|
||||||
|
|
||||||
var realColsRendered []int
|
var realColsRendered []int
|
||||||
|
|
||||||
@ -116,12 +115,12 @@ func (c *compilerContext) renderColumnSearchRank(sel *qcode.Select, ti *DBTableI
|
|||||||
io.WriteString(c.w, `ts_rank(`)
|
io.WriteString(c.w, `ts_rank(`)
|
||||||
colWithTable(c.w, ti.Name, cn)
|
colWithTable(c.w, ti.Name, cn)
|
||||||
if c.schema.ver >= 110000 {
|
if c.schema.ver >= 110000 {
|
||||||
io.WriteString(c.w, `, websearch_to_tsquery('{{`)
|
io.WriteString(c.w, `, websearch_to_tsquery(`)
|
||||||
} else {
|
} else {
|
||||||
io.WriteString(c.w, `, to_tsquery('{{`)
|
io.WriteString(c.w, `, to_tsquery(`)
|
||||||
}
|
}
|
||||||
io.WriteString(c.w, arg.Val)
|
c.renderValueExp(Param{Name: arg.Val, Type: "string"})
|
||||||
io.WriteString(c.w, `}}'))`)
|
io.WriteString(c.w, `))`)
|
||||||
alias(c.w, col.Name)
|
alias(c.w, col.Name)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
@ -141,12 +140,12 @@ func (c *compilerContext) renderColumnSearchHeadline(sel *qcode.Select, ti *DBTa
|
|||||||
io.WriteString(c.w, `ts_headline(`)
|
io.WriteString(c.w, `ts_headline(`)
|
||||||
colWithTable(c.w, ti.Name, cn)
|
colWithTable(c.w, ti.Name, cn)
|
||||||
if c.schema.ver >= 110000 {
|
if c.schema.ver >= 110000 {
|
||||||
io.WriteString(c.w, `, websearch_to_tsquery('{{`)
|
io.WriteString(c.w, `, websearch_to_tsquery(`)
|
||||||
} else {
|
} else {
|
||||||
io.WriteString(c.w, `, to_tsquery('{{`)
|
io.WriteString(c.w, `, to_tsquery(`)
|
||||||
}
|
}
|
||||||
io.WriteString(c.w, arg.Val)
|
c.renderValueExp(Param{Name: arg.Val, Type: "string"})
|
||||||
io.WriteString(c.w, `}}'))`)
|
io.WriteString(c.w, `))`)
|
||||||
alias(c.w, col.Name)
|
alias(c.w, col.Name)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -4,6 +4,7 @@ package psql
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/core/internal/qcode"
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -12,9 +13,9 @@ var (
|
|||||||
|
|
||||||
schema = GetTestSchema()
|
schema = GetTestSchema()
|
||||||
|
|
||||||
vars = NewVariables(map[string]string{
|
vars = map[string]string{
|
||||||
"admin_account_id": "5",
|
"admin_account_id": "5",
|
||||||
})
|
}
|
||||||
|
|
||||||
pcompileTest = NewCompiler(Config{
|
pcompileTest = NewCompiler(Config{
|
||||||
Schema: schema,
|
Schema: schema,
|
||||||
|
@ -10,8 +10,8 @@ import (
|
|||||||
"github.com/dosco/super-graph/core/internal/util"
|
"github.com/dosco/super-graph/core/internal/util"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (c *compilerContext) renderInsert(qc *qcode.QCode, w io.Writer,
|
func (c *compilerContext) renderInsert(
|
||||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||||
|
|
||||||
insert, ok := vars[qc.ActionVar]
|
insert, ok := vars[qc.ActionVar]
|
||||||
if !ok {
|
if !ok {
|
||||||
@ -25,9 +25,8 @@ func (c *compilerContext) renderInsert(qc *qcode.QCode, w io.Writer,
|
|||||||
if insert[0] == '[' {
|
if insert[0] == '[' {
|
||||||
io.WriteString(c.w, `json_array_elements(`)
|
io.WriteString(c.w, `json_array_elements(`)
|
||||||
}
|
}
|
||||||
io.WriteString(c.w, `'{{`)
|
c.renderValueExp(Param{Name: qc.ActionVar, Type: "json"})
|
||||||
io.WriteString(c.w, qc.ActionVar)
|
io.WriteString(c.w, ` :: json`)
|
||||||
io.WriteString(c.w, `}}' :: json`)
|
|
||||||
if insert[0] == '[' {
|
if insert[0] == '[' {
|
||||||
io.WriteString(c.w, `)`)
|
io.WriteString(c.w, `)`)
|
||||||
}
|
}
|
||||||
@ -90,12 +89,12 @@ func (c *compilerContext) renderInsertStmt(qc *qcode.QCode, w io.Writer, item re
|
|||||||
io.WriteString(w, `INSERT INTO `)
|
io.WriteString(w, `INSERT INTO `)
|
||||||
quoted(w, ti.Name)
|
quoted(w, ti.Name)
|
||||||
io.WriteString(w, ` (`)
|
io.WriteString(w, ` (`)
|
||||||
renderInsertUpdateColumns(w, qc, jt, ti, sk, false)
|
c.renderInsertUpdateColumns(qc, jt, ti, sk, false)
|
||||||
renderNestedInsertRelColumns(w, item.kvitem, false)
|
renderNestedInsertRelColumns(w, item.kvitem, false)
|
||||||
io.WriteString(w, `)`)
|
io.WriteString(w, `)`)
|
||||||
|
|
||||||
io.WriteString(w, ` SELECT `)
|
io.WriteString(w, ` SELECT `)
|
||||||
renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
|
c.renderInsertUpdateColumns(qc, jt, ti, sk, true)
|
||||||
renderNestedInsertRelColumns(w, item.kvitem, true)
|
renderNestedInsertRelColumns(w, item.kvitem, true)
|
||||||
|
|
||||||
io.WriteString(w, ` FROM "_sg_input" i`)
|
io.WriteString(w, ` FROM "_sg_input" i`)
|
||||||
|
@ -6,6 +6,7 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
|
"strings"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/core/internal/qcode"
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
"github.com/dosco/super-graph/core/internal/util"
|
"github.com/dosco/super-graph/core/internal/util"
|
||||||
@ -33,42 +34,44 @@ var updateTypes = map[string]itemType{
|
|||||||
|
|
||||||
var noLimit = qcode.Paging{NoLimit: true}
|
var noLimit = qcode.Paging{NoLimit: true}
|
||||||
|
|
||||||
func (co *Compiler) compileMutation(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
|
func (co *Compiler) compileMutation(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
||||||
|
md := Metadata{}
|
||||||
|
|
||||||
if len(qc.Selects) == 0 {
|
if len(qc.Selects) == 0 {
|
||||||
return 0, errors.New("empty query")
|
return md, errors.New("empty query")
|
||||||
}
|
}
|
||||||
|
|
||||||
c := &compilerContext{w, qc.Selects, co}
|
c := &compilerContext{md, w, qc.Selects, co}
|
||||||
root := &qc.Selects[0]
|
root := &qc.Selects[0]
|
||||||
|
|
||||||
ti, err := c.schema.GetTable(root.Name)
|
ti, err := c.schema.GetTable(root.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return c.md, err
|
||||||
}
|
}
|
||||||
|
|
||||||
switch qc.Type {
|
switch qc.Type {
|
||||||
case qcode.QTInsert:
|
case qcode.QTInsert:
|
||||||
if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
|
if _, err := c.renderInsert(w, qc, vars, ti); err != nil {
|
||||||
return 0, err
|
return c.md, err
|
||||||
}
|
}
|
||||||
|
|
||||||
case qcode.QTUpdate:
|
case qcode.QTUpdate:
|
||||||
if _, err := c.renderUpdate(qc, w, vars, ti); err != nil {
|
if _, err := c.renderUpdate(w, qc, vars, ti); err != nil {
|
||||||
return 0, err
|
return c.md, err
|
||||||
}
|
}
|
||||||
|
|
||||||
case qcode.QTUpsert:
|
case qcode.QTUpsert:
|
||||||
if _, err := c.renderUpsert(qc, w, vars, ti); err != nil {
|
if _, err := c.renderUpsert(w, qc, vars, ti); err != nil {
|
||||||
return 0, err
|
return c.md, err
|
||||||
}
|
}
|
||||||
|
|
||||||
case qcode.QTDelete:
|
case qcode.QTDelete:
|
||||||
if _, err := c.renderDelete(qc, w, vars, ti); err != nil {
|
if _, err := c.renderDelete(w, qc, vars, ti); err != nil {
|
||||||
return 0, err
|
return c.md, err
|
||||||
}
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return 0, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'")
|
return c.md, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'")
|
||||||
}
|
}
|
||||||
|
|
||||||
root.Paging = noLimit
|
root.Paging = noLimit
|
||||||
@ -77,7 +80,7 @@ func (co *Compiler) compileMutation(qc *qcode.QCode, w io.Writer, vars Variables
|
|||||||
root.Where = nil
|
root.Where = nil
|
||||||
root.Args = nil
|
root.Args = nil
|
||||||
|
|
||||||
return c.compileQuery(qc, w, vars)
|
return co.compileQueryWithMetadata(w, qc, vars, c.md)
|
||||||
}
|
}
|
||||||
|
|
||||||
type kvitem struct {
|
type kvitem struct {
|
||||||
@ -365,12 +368,12 @@ func (c *compilerContext) renderUnionStmt(w io.Writer, item renitem) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func renderInsertUpdateColumns(w io.Writer,
|
func (c *compilerContext) renderInsertUpdateColumns(
|
||||||
qc *qcode.QCode,
|
qc *qcode.QCode,
|
||||||
jt map[string]json.RawMessage,
|
jt map[string]json.RawMessage,
|
||||||
ti *DBTableInfo,
|
ti *DBTableInfo,
|
||||||
skipcols map[string]struct{},
|
skipcols map[string]struct{},
|
||||||
values bool) (uint32, error) {
|
isValues bool) (uint32, error) {
|
||||||
|
|
||||||
root := &qc.Selects[0]
|
root := &qc.Selects[0]
|
||||||
renderedCol := false
|
renderedCol := false
|
||||||
@ -392,18 +395,18 @@ func renderInsertUpdateColumns(w io.Writer,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if n != 0 {
|
if n != 0 {
|
||||||
io.WriteString(w, `, `)
|
io.WriteString(c.w, `, `)
|
||||||
}
|
}
|
||||||
|
|
||||||
if values {
|
if isValues {
|
||||||
io.WriteString(w, `CAST( i.j ->>`)
|
io.WriteString(c.w, `CAST( i.j ->>`)
|
||||||
io.WriteString(w, `'`)
|
io.WriteString(c.w, `'`)
|
||||||
io.WriteString(w, cn.Name)
|
io.WriteString(c.w, cn.Name)
|
||||||
io.WriteString(w, `' AS `)
|
io.WriteString(c.w, `' AS `)
|
||||||
io.WriteString(w, cn.Type)
|
io.WriteString(c.w, cn.Type)
|
||||||
io.WriteString(w, `)`)
|
io.WriteString(c.w, `)`)
|
||||||
} else {
|
} else {
|
||||||
quoted(w, cn.Name)
|
quoted(c.w, cn.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !renderedCol {
|
if !renderedCol {
|
||||||
@ -422,16 +425,28 @@ func renderInsertUpdateColumns(w io.Writer,
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if i != 0 || n != 0 {
|
if i != 0 || n != 0 {
|
||||||
io.WriteString(w, `, `)
|
io.WriteString(c.w, `, `)
|
||||||
}
|
}
|
||||||
|
|
||||||
if values {
|
if isValues {
|
||||||
io.WriteString(w, `'`)
|
val := root.PresetMap[cn]
|
||||||
io.WriteString(w, root.PresetMap[cn])
|
switch {
|
||||||
io.WriteString(w, `' :: `)
|
case ok && len(val) > 1 && val[0] == '$':
|
||||||
io.WriteString(w, col.Type)
|
c.renderValueExp(Param{Name: val[1:], Type: col.Type})
|
||||||
|
|
||||||
|
case ok && strings.HasPrefix(val, "sql:"):
|
||||||
|
io.WriteString(c.w, `(`)
|
||||||
|
c.renderVar(val[4:], c.renderValueExp)
|
||||||
|
io.WriteString(c.w, `)`)
|
||||||
|
|
||||||
|
case ok:
|
||||||
|
squoted(c.w, val)
|
||||||
|
}
|
||||||
|
|
||||||
|
io.WriteString(c.w, ` :: `)
|
||||||
|
io.WriteString(c.w, col.Type)
|
||||||
} else {
|
} else {
|
||||||
quoted(w, cn)
|
quoted(c.w, cn)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !renderedCol {
|
if !renderedCol {
|
||||||
@ -440,15 +455,15 @@ func renderInsertUpdateColumns(w io.Writer,
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(skipcols) != 0 && renderedCol {
|
if len(skipcols) != 0 && renderedCol {
|
||||||
io.WriteString(w, `, `)
|
io.WriteString(c.w, `, `)
|
||||||
}
|
}
|
||||||
return 0, nil
|
return 0, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderUpsert(qc *qcode.QCode, w io.Writer,
|
func (c *compilerContext) renderUpsert(
|
||||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||||
root := &qc.Selects[0]
|
|
||||||
|
|
||||||
|
root := &qc.Selects[0]
|
||||||
upsert, ok := vars[qc.ActionVar]
|
upsert, ok := vars[qc.ActionVar]
|
||||||
if !ok {
|
if !ok {
|
||||||
return 0, fmt.Errorf("variable '%s' not defined", qc.ActionVar)
|
return 0, fmt.Errorf("variable '%s' not defined", qc.ActionVar)
|
||||||
@ -466,7 +481,7 @@ func (c *compilerContext) renderUpsert(qc *qcode.QCode, w io.Writer,
|
|||||||
return 0, err
|
return 0, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
|
if _, err := c.renderInsert(w, qc, vars, ti); err != nil {
|
||||||
return 0, err
|
return 0, err
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -672,7 +687,7 @@ func renderCteName(w io.Writer, item kvitem) error {
|
|||||||
io.WriteString(w, item.ti.Name)
|
io.WriteString(w, item.ti.Name)
|
||||||
if item._type == itemConnect || item._type == itemDisconnect {
|
if item._type == itemConnect || item._type == itemDisconnect {
|
||||||
io.WriteString(w, `_`)
|
io.WriteString(w, `_`)
|
||||||
int2string(w, item.id)
|
int32String(w, item.id)
|
||||||
}
|
}
|
||||||
io.WriteString(w, `"`)
|
io.WriteString(w, `"`)
|
||||||
return nil
|
return nil
|
||||||
|
@ -72,7 +72,7 @@ func delete(t *testing.T) {
|
|||||||
// }
|
// }
|
||||||
// }`
|
// }`
|
||||||
|
|
||||||
// sql := `WITH "users" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) INSERT INTO "users" ("full_name", "email") SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
|
// sql := `WITH "users" AS (WITH "input" AS (SELECT '$1' :: json AS j) INSERT INTO "users" ("full_name", "email") SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
|
||||||
|
|
||||||
// vars := map[string]json.RawMessage{
|
// vars := map[string]json.RawMessage{
|
||||||
// "data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
|
// "data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
|
||||||
@ -97,7 +97,7 @@ func delete(t *testing.T) {
|
|||||||
// }
|
// }
|
||||||
// }`
|
// }`
|
||||||
|
|
||||||
// sql := `WITH "users" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) UPDATE "users" SET ("full_name", "email") = (SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t) WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
|
// sql := `WITH "users" AS (WITH "input" AS (SELECT '$1' :: json AS j) UPDATE "users" SET ("full_name", "email") = (SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t) WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
|
||||||
|
|
||||||
// vars := map[string]json.RawMessage{
|
// vars := map[string]json.RawMessage{
|
||||||
// "data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
|
// "data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
|
||||||
|
@ -139,9 +139,9 @@ func TestMain(m *testing.M) {
|
|||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
vars := psql.NewVariables(map[string]string{
|
vars := map[string]string{
|
||||||
"admin_account_id": "5",
|
"admin_account_id": "5",
|
||||||
})
|
}
|
||||||
|
|
||||||
pcompile = psql.NewCompiler(psql.Config{
|
pcompile = psql.NewCompiler(psql.Config{
|
||||||
Schema: schema,
|
Schema: schema,
|
||||||
|
@ -17,9 +17,24 @@ const (
|
|||||||
closeBlock = 500
|
closeBlock = 500
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
type Param struct {
|
||||||
ErrAllTablesSkipped = errors.New("all tables skipped. cannot render query")
|
Name string
|
||||||
)
|
Type string
|
||||||
|
IsArray bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type Metadata struct {
|
||||||
|
Skipped uint32
|
||||||
|
Params []Param
|
||||||
|
pindex map[string]int
|
||||||
|
}
|
||||||
|
|
||||||
|
type compilerContext struct {
|
||||||
|
md Metadata
|
||||||
|
w io.Writer
|
||||||
|
s []qcode.Select
|
||||||
|
*Compiler
|
||||||
|
}
|
||||||
|
|
||||||
type Variables map[string]json.RawMessage
|
type Variables map[string]json.RawMessage
|
||||||
|
|
||||||
@ -40,12 +55,12 @@ func NewCompiler(conf Config) *Compiler {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Compiler) AddRelationship(child, parent string, rel *DBRel) error {
|
func (co *Compiler) AddRelationship(child, parent string, rel *DBRel) error {
|
||||||
return c.schema.SetRel(child, parent, rel)
|
return co.schema.SetRel(child, parent, rel)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Compiler) IDColumn(table string) (*DBColumn, error) {
|
func (co *Compiler) IDColumn(table string) (*DBColumn, error) {
|
||||||
ti, err := c.schema.GetTable(table)
|
ti, err := co.schema.GetTable(table)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -57,65 +72,71 @@ func (c *Compiler) IDColumn(table string) (*DBColumn, error) {
|
|||||||
return ti.PrimaryCol, nil
|
return ti.PrimaryCol, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type compilerContext struct {
|
func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (Metadata, []byte, error) {
|
||||||
w io.Writer
|
|
||||||
s []qcode.Select
|
|
||||||
*Compiler
|
|
||||||
}
|
|
||||||
|
|
||||||
func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (uint32, []byte, error) {
|
|
||||||
w := &bytes.Buffer{}
|
w := &bytes.Buffer{}
|
||||||
skipped, err := co.Compile(qc, w, vars)
|
metad, err := co.Compile(w, qc, vars)
|
||||||
return skipped, w.Bytes(), err
|
return metad, w.Bytes(), err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (co *Compiler) Compile(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
|
func (co *Compiler) Compile(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
||||||
switch qc.Type {
|
switch qc.Type {
|
||||||
case qcode.QTQuery:
|
case qcode.QTQuery:
|
||||||
return co.compileQuery(qc, w, vars)
|
return co.compileQuery(w, qc, vars)
|
||||||
case qcode.QTInsert, qcode.QTUpdate, qcode.QTDelete, qcode.QTUpsert:
|
|
||||||
return co.compileMutation(qc, w, vars)
|
case qcode.QTInsert,
|
||||||
|
qcode.QTUpdate,
|
||||||
|
qcode.QTDelete,
|
||||||
|
qcode.QTUpsert:
|
||||||
|
return co.compileMutation(w, qc, vars)
|
||||||
}
|
}
|
||||||
|
|
||||||
return 0, fmt.Errorf("Unknown operation type %d", qc.Type)
|
return Metadata{}, fmt.Errorf("Unknown operation type %d", qc.Type)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
|
func (co *Compiler) compileQuery(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
||||||
|
return co.compileQueryWithMetadata(w, qc, vars, Metadata{})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (co *Compiler) compileQueryWithMetadata(
|
||||||
|
w io.Writer, qc *qcode.QCode, vars Variables, md Metadata) (Metadata, error) {
|
||||||
|
|
||||||
if len(qc.Selects) == 0 {
|
if len(qc.Selects) == 0 {
|
||||||
return 0, errors.New("empty query")
|
return md, errors.New("empty query")
|
||||||
}
|
}
|
||||||
|
|
||||||
c := &compilerContext{w, qc.Selects, co}
|
c := &compilerContext{md, w, qc.Selects, co}
|
||||||
|
|
||||||
st := NewIntStack()
|
st := NewIntStack()
|
||||||
i := 0
|
i := 0
|
||||||
|
|
||||||
io.WriteString(c.w, `SELECT jsonb_build_object(`)
|
io.WriteString(c.w, `SELECT jsonb_build_object(`)
|
||||||
for _, id := range qc.Roots {
|
for _, id := range qc.Roots {
|
||||||
root := &qc.Selects[id]
|
|
||||||
if root.SkipRender || len(root.Cols) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
st.Push(root.ID + closeBlock)
|
|
||||||
st.Push(root.ID)
|
|
||||||
|
|
||||||
if i != 0 {
|
if i != 0 {
|
||||||
io.WriteString(c.w, `, `)
|
io.WriteString(c.w, `, `)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.renderRootSelect(root)
|
root := &qc.Selects[id]
|
||||||
|
|
||||||
|
if root.SkipRender || len(root.Cols) == 0 {
|
||||||
|
squoted(c.w, root.FieldName)
|
||||||
|
io.WriteString(c.w, `, `)
|
||||||
|
io.WriteString(c.w, `NULL`)
|
||||||
|
|
||||||
|
} else {
|
||||||
|
st.Push(root.ID + closeBlock)
|
||||||
|
st.Push(root.ID)
|
||||||
|
c.renderRootSelect(root)
|
||||||
|
}
|
||||||
|
|
||||||
i++
|
i++
|
||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, `) as "__root" FROM `)
|
if st.Len() != 0 {
|
||||||
|
io.WriteString(c.w, `) as "__root" FROM `)
|
||||||
if i == 0 {
|
} else {
|
||||||
return 0, ErrAllTablesSkipped
|
io.WriteString(c.w, `) as "__root"`)
|
||||||
|
return c.md, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var ignored uint32
|
|
||||||
|
|
||||||
for {
|
for {
|
||||||
if st.Len() == 0 {
|
if st.Len() == 0 {
|
||||||
break
|
break
|
||||||
@ -132,7 +153,7 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
|
|||||||
|
|
||||||
ti, err := c.schema.GetTable(sel.Name)
|
ti, err := c.schema.GetTable(sel.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return c.md, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if sel.ParentID == -1 {
|
if sel.ParentID == -1 {
|
||||||
@ -145,14 +166,12 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
|
|||||||
c.renderPluralSelect(sel, ti)
|
c.renderPluralSelect(sel, ti)
|
||||||
}
|
}
|
||||||
|
|
||||||
skipped, err := c.renderSelect(sel, ti, vars)
|
if err := c.renderSelect(sel, ti, vars); err != nil {
|
||||||
if err != nil {
|
return c.md, err
|
||||||
return 0, err
|
|
||||||
}
|
}
|
||||||
ignored |= skipped
|
|
||||||
|
|
||||||
for _, cid := range sel.Children {
|
for _, cid := range sel.Children {
|
||||||
if hasBit(skipped, uint32(cid)) {
|
if hasBit(c.md.Skipped, uint32(cid)) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
child := &c.s[cid]
|
child := &c.s[cid]
|
||||||
@ -169,7 +188,7 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
|
|||||||
|
|
||||||
ti, err := c.schema.GetTable(sel.Name)
|
ti, err := c.schema.GetTable(sel.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return c.md, err
|
||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, `)`)
|
io.WriteString(c.w, `)`)
|
||||||
@ -201,12 +220,12 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return ignored, nil
|
return c.md, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderPluralSelect(sel *qcode.Select, ti *DBTableInfo) error {
|
func (c *compilerContext) renderPluralSelect(sel *qcode.Select, ti *DBTableInfo) error {
|
||||||
io.WriteString(c.w, `SELECT coalesce(jsonb_agg("__sj_`)
|
io.WriteString(c.w, `SELECT coalesce(jsonb_agg("__sj_`)
|
||||||
int2string(c.w, sel.ID)
|
int32String(c.w, sel.ID)
|
||||||
io.WriteString(c.w, `"."json"), '[]') as "json"`)
|
io.WriteString(c.w, `"."json"), '[]') as "json"`)
|
||||||
|
|
||||||
if sel.Paging.Type != qcode.PtOffset {
|
if sel.Paging.Type != qcode.PtOffset {
|
||||||
@ -230,7 +249,7 @@ func (c *compilerContext) renderPluralSelect(sel *qcode.Select, ti *DBTableInfo)
|
|||||||
io.WriteString(c.w, `, CONCAT_WS(','`)
|
io.WriteString(c.w, `, CONCAT_WS(','`)
|
||||||
for i := 0; i < n; i++ {
|
for i := 0; i < n; i++ {
|
||||||
io.WriteString(c.w, `, max("__cur_`)
|
io.WriteString(c.w, `, max("__cur_`)
|
||||||
int2string(c.w, int32(i))
|
int32String(c.w, int32(i))
|
||||||
io.WriteString(c.w, `")`)
|
io.WriteString(c.w, `")`)
|
||||||
}
|
}
|
||||||
io.WriteString(c.w, `) as "cursor"`)
|
io.WriteString(c.w, `) as "cursor"`)
|
||||||
@ -246,7 +265,7 @@ func (c *compilerContext) renderRootSelect(sel *qcode.Select) error {
|
|||||||
io.WriteString(c.w, `', `)
|
io.WriteString(c.w, `', `)
|
||||||
|
|
||||||
io.WriteString(c.w, `"__sj_`)
|
io.WriteString(c.w, `"__sj_`)
|
||||||
int2string(c.w, sel.ID)
|
int32String(c.w, sel.ID)
|
||||||
io.WriteString(c.w, `"."json"`)
|
io.WriteString(c.w, `"."json"`)
|
||||||
|
|
||||||
if sel.Paging.Type != qcode.PtOffset {
|
if sel.Paging.Type != qcode.PtOffset {
|
||||||
@ -255,16 +274,14 @@ func (c *compilerContext) renderRootSelect(sel *qcode.Select) error {
|
|||||||
io.WriteString(c.w, `_cursor', `)
|
io.WriteString(c.w, `_cursor', `)
|
||||||
|
|
||||||
io.WriteString(c.w, `"__sj_`)
|
io.WriteString(c.w, `"__sj_`)
|
||||||
int2string(c.w, sel.ID)
|
int32String(c.w, sel.ID)
|
||||||
io.WriteString(c.w, `"."cursor"`)
|
io.WriteString(c.w, `"."cursor"`)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) (uint32, []*qcode.Column, error) {
|
func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) ([]*qcode.Column, error) {
|
||||||
var skipped uint32
|
|
||||||
|
|
||||||
cols := make([]*qcode.Column, 0, len(sel.Cols))
|
cols := make([]*qcode.Column, 0, len(sel.Cols))
|
||||||
colmap := make(map[string]struct{}, len(sel.Cols))
|
colmap := make(map[string]struct{}, len(sel.Cols))
|
||||||
|
|
||||||
@ -306,9 +323,7 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
|
|||||||
|
|
||||||
rel, err := c.schema.GetRel(child.Name, ti.Name)
|
rel, err := c.schema.GetRel(child.Name, ti.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, nil, err
|
return nil, err
|
||||||
//skipped |= (1 << uint(id))
|
|
||||||
//continue
|
|
||||||
}
|
}
|
||||||
|
|
||||||
switch rel.Type {
|
switch rel.Type {
|
||||||
@ -334,16 +349,15 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
|
|||||||
if _, ok := colmap[rel.Left.Col]; !ok {
|
if _, ok := colmap[rel.Left.Col]; !ok {
|
||||||
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Right.Col})
|
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Right.Col})
|
||||||
colmap[rel.Left.Col] = struct{}{}
|
colmap[rel.Left.Col] = struct{}{}
|
||||||
skipped |= (1 << uint(id))
|
c.md.Skipped |= (1 << uint(id))
|
||||||
}
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return 0, nil, fmt.Errorf("unknown relationship %s", rel)
|
return nil, fmt.Errorf("unknown relationship %s", rel)
|
||||||
//skipped |= (1 << uint(id))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return skipped, cols, nil
|
return cols, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// This
|
// This
|
||||||
@ -412,7 +426,7 @@ func (c *compilerContext) addSeekPredicate(sel *qcode.Select) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) (uint32, error) {
|
func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) error {
|
||||||
var rel *DBRel
|
var rel *DBRel
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
@ -421,13 +435,13 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
|
|||||||
|
|
||||||
rel, err = c.schema.GetRel(ti.Name, parent.Name)
|
rel, err = c.schema.GetRel(ti.Name, parent.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
skipped, childCols, err := c.initSelect(sel, ti, vars)
|
childCols, err := c.initSelect(sel, ti, vars)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// SELECT
|
// SELECT
|
||||||
@ -437,13 +451,13 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
|
|||||||
// }
|
// }
|
||||||
|
|
||||||
io.WriteString(c.w, `SELECT to_jsonb("__sr_`)
|
io.WriteString(c.w, `SELECT to_jsonb("__sr_`)
|
||||||
int2string(c.w, sel.ID)
|
int32String(c.w, sel.ID)
|
||||||
io.WriteString(c.w, `".*) `)
|
io.WriteString(c.w, `".*) `)
|
||||||
|
|
||||||
if sel.Paging.Type != qcode.PtOffset {
|
if sel.Paging.Type != qcode.PtOffset {
|
||||||
for i := range sel.OrderBy {
|
for i := range sel.OrderBy {
|
||||||
io.WriteString(c.w, `- '__cur_`)
|
io.WriteString(c.w, `- '__cur_`)
|
||||||
int2string(c.w, int32(i))
|
int32String(c.w, int32(i))
|
||||||
io.WriteString(c.w, `' `)
|
io.WriteString(c.w, `' `)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -453,15 +467,15 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
|
|||||||
if sel.Paging.Type != qcode.PtOffset {
|
if sel.Paging.Type != qcode.PtOffset {
|
||||||
for i := range sel.OrderBy {
|
for i := range sel.OrderBy {
|
||||||
io.WriteString(c.w, `, "__cur_`)
|
io.WriteString(c.w, `, "__cur_`)
|
||||||
int2string(c.w, int32(i))
|
int32String(c.w, int32(i))
|
||||||
io.WriteString(c.w, `"`)
|
io.WriteString(c.w, `"`)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, `FROM (SELECT `)
|
io.WriteString(c.w, `FROM (SELECT `)
|
||||||
|
|
||||||
if err := c.renderColumns(sel, ti, skipped); err != nil {
|
if err := c.renderColumns(sel, ti); err != nil {
|
||||||
return 0, err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if sel.Paging.Type != qcode.PtOffset {
|
if sel.Paging.Type != qcode.PtOffset {
|
||||||
@ -469,7 +483,7 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
|
|||||||
io.WriteString(c.w, `, LAST_VALUE(`)
|
io.WriteString(c.w, `, LAST_VALUE(`)
|
||||||
colWithTableID(c.w, ti.Name, sel.ID, ob.Col)
|
colWithTableID(c.w, ti.Name, sel.ID, ob.Col)
|
||||||
io.WriteString(c.w, `) OVER() AS "__cur_`)
|
io.WriteString(c.w, `) OVER() AS "__cur_`)
|
||||||
int2string(c.w, int32(i))
|
int32String(c.w, int32(i))
|
||||||
io.WriteString(c.w, `"`)
|
io.WriteString(c.w, `"`)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -477,9 +491,8 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
|
|||||||
io.WriteString(c.w, ` FROM (`)
|
io.WriteString(c.w, ` FROM (`)
|
||||||
|
|
||||||
// FROM (SELECT .... )
|
// FROM (SELECT .... )
|
||||||
err = c.renderBaseSelect(sel, ti, rel, childCols, skipped)
|
if err = c.renderBaseSelect(sel, ti, rel, childCols); err != nil {
|
||||||
if err != nil {
|
return err
|
||||||
return skipped, err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
//fmt.Fprintf(w, `) AS "%s_%d"`, c.sel.Name, c.sel.ID)
|
//fmt.Fprintf(w, `) AS "%s_%d"`, c.sel.Name, c.sel.ID)
|
||||||
@ -488,7 +501,7 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
|
|||||||
|
|
||||||
// END-FROM
|
// END-FROM
|
||||||
|
|
||||||
return skipped, nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderLateralJoin(sel *qcode.Select) error {
|
func (c *compilerContext) renderLateralJoin(sel *qcode.Select) error {
|
||||||
@ -538,7 +551,7 @@ func (c *compilerContext) renderJoinByName(table, parent string, id int32) error
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo, skipped uint32) error {
|
func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo) error {
|
||||||
i := 0
|
i := 0
|
||||||
var cn string
|
var cn string
|
||||||
|
|
||||||
@ -574,7 +587,7 @@ func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo, skip
|
|||||||
|
|
||||||
i += c.renderRemoteRelColumns(sel, ti, i)
|
i += c.renderRemoteRelColumns(sel, ti, i)
|
||||||
|
|
||||||
return c.renderJoinColumns(sel, ti, skipped, i)
|
return c.renderJoinColumns(sel, ti, i)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderRemoteRelColumns(sel *qcode.Select, ti *DBTableInfo, colsRendered int) int {
|
func (c *compilerContext) renderRemoteRelColumns(sel *qcode.Select, ti *DBTableInfo, colsRendered int) int {
|
||||||
@ -599,12 +612,12 @@ func (c *compilerContext) renderRemoteRelColumns(sel *qcode.Select, ti *DBTableI
|
|||||||
return i
|
return i
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo, skipped uint32, colsRendered int) error {
|
func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo, colsRendered int) error {
|
||||||
// columns previously rendered
|
// columns previously rendered
|
||||||
i := colsRendered
|
i := colsRendered
|
||||||
|
|
||||||
for _, id := range sel.Children {
|
for _, id := range sel.Children {
|
||||||
if hasBit(skipped, uint32(id)) {
|
if hasBit(c.md.Skipped, uint32(id)) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
childSel := &c.s[id]
|
childSel := &c.s[id]
|
||||||
@ -620,13 +633,13 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
|
|||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, `"__sj_`)
|
io.WriteString(c.w, `"__sj_`)
|
||||||
int2string(c.w, childSel.ID)
|
int32String(c.w, childSel.ID)
|
||||||
io.WriteString(c.w, `"."json"`)
|
io.WriteString(c.w, `"."json"`)
|
||||||
alias(c.w, childSel.FieldName)
|
alias(c.w, childSel.FieldName)
|
||||||
|
|
||||||
if childSel.Paging.Type != qcode.PtOffset {
|
if childSel.Paging.Type != qcode.PtOffset {
|
||||||
io.WriteString(c.w, `, "__sj_`)
|
io.WriteString(c.w, `, "__sj_`)
|
||||||
int2string(c.w, childSel.ID)
|
int32String(c.w, childSel.ID)
|
||||||
io.WriteString(c.w, `"."cursor" AS "`)
|
io.WriteString(c.w, `"."cursor" AS "`)
|
||||||
io.WriteString(c.w, childSel.FieldName)
|
io.WriteString(c.w, childSel.FieldName)
|
||||||
io.WriteString(c.w, `_cursor"`)
|
io.WriteString(c.w, `_cursor"`)
|
||||||
@ -639,7 +652,7 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, rel *DBRel,
|
func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, rel *DBRel,
|
||||||
childCols []*qcode.Column, skipped uint32) error {
|
childCols []*qcode.Column) error {
|
||||||
isRoot := (rel == nil)
|
isRoot := (rel == nil)
|
||||||
isFil := (sel.Where != nil && sel.Where.Op != qcode.OpNop)
|
isFil := (sel.Where != nil && sel.Where.Op != qcode.OpNop)
|
||||||
hasOrder := len(sel.OrderBy) != 0
|
hasOrder := len(sel.OrderBy) != 0
|
||||||
@ -654,7 +667,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
|
|||||||
c.renderDistinctOn(sel, ti)
|
c.renderDistinctOn(sel, ti)
|
||||||
}
|
}
|
||||||
|
|
||||||
realColsRendered, isAgg, err := c.renderBaseColumns(sel, ti, childCols, skipped)
|
realColsRendered, isAgg, err := c.renderBaseColumns(sel, ti, childCols)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -781,11 +794,13 @@ func (c *compilerContext) renderCursorCTE(sel *qcode.Select) error {
|
|||||||
io.WriteString(c.w, `, `)
|
io.WriteString(c.w, `, `)
|
||||||
}
|
}
|
||||||
io.WriteString(c.w, `a[`)
|
io.WriteString(c.w, `a[`)
|
||||||
int2string(c.w, int32(i+1))
|
int32String(c.w, int32(i+1))
|
||||||
io.WriteString(c.w, `] as `)
|
io.WriteString(c.w, `] as `)
|
||||||
quoted(c.w, ob.Col)
|
quoted(c.w, ob.Col)
|
||||||
}
|
}
|
||||||
io.WriteString(c.w, ` FROM string_to_array('{{cursor}}', ',') as a) `)
|
io.WriteString(c.w, ` FROM string_to_array(`)
|
||||||
|
c.renderValueExp(Param{Name: "cursor", Type: "json"})
|
||||||
|
io.WriteString(c.w, `, ',') as a) `)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1026,9 +1041,9 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
|
|||||||
case qcode.OpLesserThan:
|
case qcode.OpLesserThan:
|
||||||
io.WriteString(c.w, `<`)
|
io.WriteString(c.w, `<`)
|
||||||
case qcode.OpIn:
|
case qcode.OpIn:
|
||||||
io.WriteString(c.w, `IN`)
|
io.WriteString(c.w, `= ANY`)
|
||||||
case qcode.OpNotIn:
|
case qcode.OpNotIn:
|
||||||
io.WriteString(c.w, `NOT IN`)
|
io.WriteString(c.w, `!= ANY`)
|
||||||
case qcode.OpLike:
|
case qcode.OpLike:
|
||||||
io.WriteString(c.w, `LIKE`)
|
io.WriteString(c.w, `LIKE`)
|
||||||
case qcode.OpNotLike:
|
case qcode.OpNotLike:
|
||||||
@ -1078,12 +1093,13 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
|
|||||||
io.WriteString(c.w, `((`)
|
io.WriteString(c.w, `((`)
|
||||||
colWithTable(c.w, ti.Name, ti.TSVCol.Name)
|
colWithTable(c.w, ti.Name, ti.TSVCol.Name)
|
||||||
if c.schema.ver >= 110000 {
|
if c.schema.ver >= 110000 {
|
||||||
io.WriteString(c.w, `) @@ websearch_to_tsquery('{{`)
|
io.WriteString(c.w, `) @@ websearch_to_tsquery(`)
|
||||||
} else {
|
} else {
|
||||||
io.WriteString(c.w, `) @@ to_tsquery('{{`)
|
io.WriteString(c.w, `) @@ to_tsquery(`)
|
||||||
}
|
}
|
||||||
io.WriteString(c.w, ex.Val)
|
c.renderValueExp(Param{Name: ex.Val, Type: "string"})
|
||||||
io.WriteString(c.w, `}}'))`)
|
io.WriteString(c.w, `))`)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
|
||||||
default:
|
default:
|
||||||
@ -1169,15 +1185,25 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
|
|||||||
val, ok := vars[ex.Val]
|
val, ok := vars[ex.Val]
|
||||||
switch {
|
switch {
|
||||||
case ok && strings.HasPrefix(val, "sql:"):
|
case ok && strings.HasPrefix(val, "sql:"):
|
||||||
io.WriteString(c.w, ` (`)
|
io.WriteString(c.w, `(`)
|
||||||
io.WriteString(c.w, val[4:])
|
c.renderVar(val[4:], c.renderValueExp)
|
||||||
io.WriteString(c.w, `)`)
|
io.WriteString(c.w, `)`)
|
||||||
|
|
||||||
case ok:
|
case ok:
|
||||||
squoted(c.w, val)
|
squoted(c.w, val)
|
||||||
|
|
||||||
|
case ex.Op == qcode.OpIn || ex.Op == qcode.OpNotIn:
|
||||||
|
io.WriteString(c.w, `(ARRAY(SELECT json_array_elements_text(`)
|
||||||
|
c.renderValueExp(Param{Name: ex.Val, Type: col.Type, IsArray: true})
|
||||||
|
io.WriteString(c.w, `))`)
|
||||||
|
|
||||||
|
io.WriteString(c.w, ` :: `)
|
||||||
|
io.WriteString(c.w, col.Type)
|
||||||
|
io.WriteString(c.w, `[])`)
|
||||||
|
return
|
||||||
|
|
||||||
default:
|
default:
|
||||||
io.WriteString(c.w, ` '{{`)
|
c.renderValueExp(Param{Name: ex.Val, Type: col.Type, IsArray: false})
|
||||||
io.WriteString(c.w, ex.Val)
|
|
||||||
io.WriteString(c.w, `}}'`)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
case qcode.ValRef:
|
case qcode.ValRef:
|
||||||
@ -1191,6 +1217,54 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
|
|||||||
io.WriteString(c.w, col.Type)
|
io.WriteString(c.w, col.Type)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (c *compilerContext) renderValueExp(p Param) {
|
||||||
|
io.WriteString(c.w, `$`)
|
||||||
|
if v, ok := c.md.pindex[p.Name]; ok {
|
||||||
|
int32String(c.w, int32(v))
|
||||||
|
|
||||||
|
} else {
|
||||||
|
c.md.Params = append(c.md.Params, p)
|
||||||
|
n := len(c.md.Params)
|
||||||
|
|
||||||
|
if c.md.pindex == nil {
|
||||||
|
c.md.pindex = make(map[string]int)
|
||||||
|
}
|
||||||
|
c.md.pindex[p.Name] = n
|
||||||
|
int32String(c.w, int32(n))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *compilerContext) renderVar(vv string, fn func(Param)) {
|
||||||
|
f, s := -1, 0
|
||||||
|
|
||||||
|
for i := range vv {
|
||||||
|
v := vv[i]
|
||||||
|
switch {
|
||||||
|
case (i > 0 && vv[i-1] != '\\' && v == '$') || v == '$':
|
||||||
|
if (i - s) > 0 {
|
||||||
|
io.WriteString(c.w, vv[s:i])
|
||||||
|
}
|
||||||
|
f = i
|
||||||
|
|
||||||
|
case (v < 'a' && v > 'z') &&
|
||||||
|
(v < 'A' && v > 'Z') &&
|
||||||
|
(v < '0' && v > '9') &&
|
||||||
|
v != '_' &&
|
||||||
|
f != -1 &&
|
||||||
|
(i-f) > 1:
|
||||||
|
fn(Param{Name: vv[f+1 : i]})
|
||||||
|
s = i
|
||||||
|
f = -1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if f != -1 && (len(vv)-f) > 1 {
|
||||||
|
fn(Param{Name: vv[f+1:]})
|
||||||
|
} else {
|
||||||
|
io.WriteString(c.w, vv[s:])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
|
func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
|
||||||
switch {
|
switch {
|
||||||
case strings.HasPrefix(fn, "avg_"):
|
case strings.HasPrefix(fn, "avg_"):
|
||||||
@ -1242,7 +1316,7 @@ func aliasWithID(w io.Writer, alias string, id int32) {
|
|||||||
io.WriteString(w, ` AS "`)
|
io.WriteString(w, ` AS "`)
|
||||||
io.WriteString(w, alias)
|
io.WriteString(w, alias)
|
||||||
io.WriteString(w, `_`)
|
io.WriteString(w, `_`)
|
||||||
int2string(w, id)
|
int32String(w, id)
|
||||||
io.WriteString(w, `"`)
|
io.WriteString(w, `"`)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1259,7 +1333,7 @@ func colWithTableID(w io.Writer, table string, id int32, col string) {
|
|||||||
io.WriteString(w, table)
|
io.WriteString(w, table)
|
||||||
if id >= 0 {
|
if id >= 0 {
|
||||||
io.WriteString(w, `_`)
|
io.WriteString(w, `_`)
|
||||||
int2string(w, id)
|
int32String(w, id)
|
||||||
}
|
}
|
||||||
io.WriteString(w, `"."`)
|
io.WriteString(w, `"."`)
|
||||||
io.WriteString(w, col)
|
io.WriteString(w, col)
|
||||||
@ -1280,7 +1354,7 @@ func squoted(w io.Writer, identifier string) {
|
|||||||
|
|
||||||
const charset = "0123456789"
|
const charset = "0123456789"
|
||||||
|
|
||||||
func int2string(w io.Writer, val int32) {
|
func int32String(w io.Writer, val int32) {
|
||||||
if val < 10 {
|
if val < 10 {
|
||||||
w.Write([]byte{charset[val]})
|
w.Write([]byte{charset[val]})
|
||||||
return
|
return
|
||||||
|
@ -32,6 +32,20 @@ func withComplexArgs(t *testing.T) {
|
|||||||
compileGQLToPSQL(t, gql, nil, "user")
|
compileGQLToPSQL(t, gql, nil, "user")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func withWhereIn(t *testing.T) {
|
||||||
|
gql := `query {
|
||||||
|
products(where: { id: { in: $list } }) {
|
||||||
|
id
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
|
||||||
|
vars := map[string]json.RawMessage{
|
||||||
|
"list": json.RawMessage(`[1,2,3]`),
|
||||||
|
}
|
||||||
|
|
||||||
|
compileGQLToPSQL(t, gql, vars, "user")
|
||||||
|
}
|
||||||
|
|
||||||
func withWhereAndList(t *testing.T) {
|
func withWhereAndList(t *testing.T) {
|
||||||
gql := `query {
|
gql := `query {
|
||||||
products(
|
products(
|
||||||
@ -367,6 +381,7 @@ func blockedFunctions(t *testing.T) {
|
|||||||
|
|
||||||
func TestCompileQuery(t *testing.T) {
|
func TestCompileQuery(t *testing.T) {
|
||||||
t.Run("withComplexArgs", withComplexArgs)
|
t.Run("withComplexArgs", withComplexArgs)
|
||||||
|
t.Run("withWhereIn", withWhereIn)
|
||||||
t.Run("withWhereAndList", withWhereAndList)
|
t.Run("withWhereAndList", withWhereAndList)
|
||||||
t.Run("withWhereIsNull", withWhereIsNull)
|
t.Run("withWhereIsNull", withWhereIsNull)
|
||||||
t.Run("withWhereMultiOr", withWhereMultiOr)
|
t.Run("withWhereMultiOr", withWhereMultiOr)
|
||||||
@ -429,7 +444,7 @@ func BenchmarkCompile(b *testing.B) {
|
|||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = pcompile.Compile(qc, w, nil)
|
_, err = pcompile.Compile(w, qc, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
@ -450,7 +465,7 @@ func BenchmarkCompileParallel(b *testing.B) {
|
|||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = pcompile.Compile(qc, w, nil)
|
_, err = pcompile.Compile(w, qc, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
|
@ -1,25 +1,25 @@
|
|||||||
=== RUN TestCompileInsert
|
=== RUN TestCompileInsert
|
||||||
=== RUN TestCompileInsert/simpleInsert
|
=== RUN TestCompileInsert/simpleInsert
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/singleInsert
|
=== RUN TestCompileInsert/singleInsert
|
||||||
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'user_id' AS bigint) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'user_id' AS bigint) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/bulkInsert
|
=== RUN TestCompileInsert/bulkInsert
|
||||||
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/simpleInsertWithPresets
|
=== RUN TestCompileInsert/simpleInsertWithPresets
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, '{{user_id}}' :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, $2 :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertManyToMany
|
=== RUN TestCompileInsert/nestedInsertManyToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT 
('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: 
integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT 
('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: 
integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToMany
|
=== RUN TestCompileInsert/nestedInsertOneToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOne
|
=== RUN TestCompileInsert/nestedInsertOneToOne
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToManyWithConnect
|
=== RUN TestCompileInsert/nestedInsertOneToManyWithConnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnect
|
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
|
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileInsert (0.02s)
|
--- PASS: TestCompileInsert (0.02s)
|
||||||
--- PASS: TestCompileInsert/simpleInsert (0.00s)
|
--- PASS: TestCompileInsert/simpleInsert (0.00s)
|
||||||
--- PASS: TestCompileInsert/singleInsert (0.00s)
|
--- PASS: TestCompileInsert/singleInsert (0.00s)
|
||||||
@ -33,14 +33,14 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id"
|
|||||||
--- PASS: TestCompileInsert/nestedInsertOneToOneWithConnectArray (0.00s)
|
--- PASS: TestCompileInsert/nestedInsertOneToOneWithConnectArray (0.00s)
|
||||||
=== RUN TestCompileMutate
|
=== RUN TestCompileMutate
|
||||||
=== RUN TestCompileMutate/singleUpsert
|
=== RUN TestCompileMutate/singleUpsert
|
||||||
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileMutate/singleUpsertWhere
|
=== RUN TestCompileMutate/singleUpsertWhere
|
||||||
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileMutate/bulkUpsert
|
=== RUN TestCompileMutate/bulkUpsert
|
||||||
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileMutate/delete
|
=== RUN TestCompileMutate/delete
|
||||||
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileMutate (0.00s)
|
--- PASS: TestCompileMutate (0.01s)
|
||||||
--- PASS: TestCompileMutate/singleUpsert (0.00s)
|
--- PASS: TestCompileMutate/singleUpsert (0.00s)
|
||||||
--- PASS: TestCompileMutate/singleUpsertWhere (0.00s)
|
--- PASS: TestCompileMutate/singleUpsertWhere (0.00s)
|
||||||
--- PASS: TestCompileMutate/bulkUpsert (0.00s)
|
--- PASS: TestCompileMutate/bulkUpsert (0.00s)
|
||||||
@ -48,6 +48,8 @@ WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :
|
|||||||
=== RUN TestCompileQuery
|
=== RUN TestCompileQuery
|
||||||
=== RUN TestCompileQuery/withComplexArgs
|
=== RUN TestCompileQuery/withComplexArgs
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
|
=== RUN TestCompileQuery/withWhereIn
|
||||||
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = ANY (ARRAY(SELECT json_array_elements_text($1)) :: bigint[])))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereAndList
|
=== RUN TestCompileQuery/withWhereAndList
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereIsNull
|
=== RUN TestCompileQuery/withWhereIsNull
|
||||||
@ -55,9 +57,9 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
|
|||||||
=== RUN TestCompileQuery/withWhereMultiOr
|
=== RUN TestCompileQuery/withWhereMultiOr
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/fetchByID
|
=== RUN TestCompileQuery/fetchByID
|
||||||
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = $1 :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/searchQuery
|
=== RUN TestCompileQuery/searchQuery
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery($1)) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery($1)) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery($1))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/oneToMany
|
=== RUN TestCompileQuery/oneToMany
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email", "__sj_1"."json" AS "products" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email", "__sj_1"."json" AS "products" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/oneToManyReverse
|
=== RUN TestCompileQuery/oneToManyReverse
|
||||||
@ -77,9 +79,9 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
|
|||||||
=== RUN TestCompileQuery/aggFunctionWithFilter
|
=== RUN TestCompileQuery/aggFunctionWithFilter
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/syntheticTables
|
=== RUN TestCompileQuery/syntheticTables
|
||||||
SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = $1 :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/queryWithVariables
|
=== RUN TestCompileQuery/queryWithVariables
|
||||||
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = $1 :: numeric(7,2)) AND (("products"."id") = $2 :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereOnRelations
|
=== RUN TestCompileQuery/withWhereOnRelations
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/multiRoot
|
=== RUN TestCompileQuery/multiRoot
|
||||||
@ -87,15 +89,16 @@ SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json",
|
|||||||
=== RUN TestCompileQuery/jsonColumnAsTable
|
=== RUN TestCompileQuery/jsonColumnAsTable
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withCursor
|
=== RUN TestCompileQuery/withCursor
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0".*) - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0".*) - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array($1, ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/nullForAuthRequiredInAnon
|
=== RUN TestCompileQuery/nullForAuthRequiredInAnon
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", NULL AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", NULL AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/blockedQuery
|
=== RUN TestCompileQuery/blockedQuery
|
||||||
SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/blockedFunctions
|
=== RUN TestCompileQuery/blockedFunctions
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileQuery (0.02s)
|
--- PASS: TestCompileQuery (0.03s)
|
||||||
--- PASS: TestCompileQuery/withComplexArgs (0.00s)
|
--- PASS: TestCompileQuery/withComplexArgs (0.00s)
|
||||||
|
--- PASS: TestCompileQuery/withWhereIn (0.00s)
|
||||||
--- PASS: TestCompileQuery/withWhereAndList (0.00s)
|
--- PASS: TestCompileQuery/withWhereAndList (0.00s)
|
||||||
--- PASS: TestCompileQuery/withWhereIsNull (0.00s)
|
--- PASS: TestCompileQuery/withWhereIsNull (0.00s)
|
||||||
--- PASS: TestCompileQuery/withWhereMultiOr (0.00s)
|
--- PASS: TestCompileQuery/withWhereMultiOr (0.00s)
|
||||||
@ -121,23 +124,23 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
|
|||||||
--- PASS: TestCompileQuery/blockedFunctions (0.00s)
|
--- PASS: TestCompileQuery/blockedFunctions (0.00s)
|
||||||
=== RUN TestCompileUpdate
|
=== RUN TestCompileUpdate
|
||||||
=== RUN TestCompileUpdate/singleUpdate
|
=== RUN TestCompileUpdate/singleUpdate
|
||||||
WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = $2 :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/simpleUpdateWithPresets
|
=== RUN TestCompileUpdate/simpleUpdateWithPresets
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone FROM "_sg_input" i) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone FROM "_sg_input" i) WHERE (("products"."user_id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateManyToMany
|
=== RUN TestCompileUpdate/nestedUpdateManyToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" 
FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = $2 :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT 
"products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" 
FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = $2 :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT 
"products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToMany
|
=== RUN TestCompileUpdate/nestedUpdateOneToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToOne
|
=== RUN TestCompileUpdate/nestedUpdateOneToOne
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
|
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '{{id}}' :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = $2 :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
|
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
|
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileUpdate (0.02s)
|
--- PASS: TestCompileUpdate (0.02s)
|
||||||
--- PASS: TestCompileUpdate/singleUpdate (0.00s)
|
--- PASS: TestCompileUpdate/singleUpdate (0.00s)
|
||||||
--- PASS: TestCompileUpdate/simpleUpdateWithPresets (0.00s)
|
--- PASS: TestCompileUpdate/simpleUpdateWithPresets (0.00s)
|
||||||
@ -148,4 +151,4 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FR
|
|||||||
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
|
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
|
||||||
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
|
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
|
||||||
PASS
|
PASS
|
||||||
ok github.com/dosco/super-graph/core/internal/psql 0.306s
|
ok github.com/dosco/super-graph/core/internal/psql (cached)
|
||||||
|
@ -10,8 +10,8 @@ import (
|
|||||||
"github.com/dosco/super-graph/core/internal/util"
|
"github.com/dosco/super-graph/core/internal/util"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (c *compilerContext) renderUpdate(qc *qcode.QCode, w io.Writer,
|
func (c *compilerContext) renderUpdate(
|
||||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||||
|
|
||||||
update, ok := vars[qc.ActionVar]
|
update, ok := vars[qc.ActionVar]
|
||||||
if !ok {
|
if !ok {
|
||||||
@ -21,9 +21,10 @@ func (c *compilerContext) renderUpdate(qc *qcode.QCode, w io.Writer,
|
|||||||
return 0, fmt.Errorf("variable '%s' is empty", qc.ActionVar)
|
return 0, fmt.Errorf("variable '%s' is empty", qc.ActionVar)
|
||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, `WITH "_sg_input" AS (SELECT '{{`)
|
io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `)
|
||||||
io.WriteString(c.w, qc.ActionVar)
|
c.renderValueExp(Param{Name: qc.ActionVar, Type: "json"})
|
||||||
io.WriteString(c.w, `}}' :: json AS j)`)
|
// io.WriteString(c.w, qc.ActionVar)
|
||||||
|
io.WriteString(c.w, ` :: json AS j)`)
|
||||||
|
|
||||||
st := util.NewStack()
|
st := util.NewStack()
|
||||||
st.Push(kvitem{_type: itemUpdate, key: ti.Name, val: update, ti: ti})
|
st.Push(kvitem{_type: itemUpdate, key: ti.Name, val: update, ti: ti})
|
||||||
@ -84,11 +85,11 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
|
|||||||
io.WriteString(w, `UPDATE `)
|
io.WriteString(w, `UPDATE `)
|
||||||
quoted(w, ti.Name)
|
quoted(w, ti.Name)
|
||||||
io.WriteString(w, ` SET (`)
|
io.WriteString(w, ` SET (`)
|
||||||
renderInsertUpdateColumns(w, qc, jt, ti, sk, false)
|
c.renderInsertUpdateColumns(qc, jt, ti, sk, false)
|
||||||
renderNestedUpdateRelColumns(w, item.kvitem, false)
|
renderNestedUpdateRelColumns(w, item.kvitem, false)
|
||||||
|
|
||||||
io.WriteString(w, `) = (SELECT `)
|
io.WriteString(w, `) = (SELECT `)
|
||||||
renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
|
c.renderInsertUpdateColumns(qc, jt, ti, sk, true)
|
||||||
renderNestedUpdateRelColumns(w, item.kvitem, true)
|
renderNestedUpdateRelColumns(w, item.kvitem, true)
|
||||||
|
|
||||||
io.WriteString(w, ` FROM "_sg_input" i`)
|
io.WriteString(w, ` FROM "_sg_input" i`)
|
||||||
@ -122,7 +123,7 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
|
|||||||
|
|
||||||
} else {
|
} else {
|
||||||
if qc.Selects[0].Where != nil {
|
if qc.Selects[0].Where != nil {
|
||||||
io.WriteString(w, ` WHERE `)
|
io.WriteString(w, `WHERE `)
|
||||||
if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
|
if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -197,8 +198,9 @@ func renderNestedUpdateRelTables(w io.Writer, item kvitem) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *compilerContext) renderDelete(qc *qcode.QCode, w io.Writer,
|
func (c *compilerContext) renderDelete(
|
||||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||||
|
|
||||||
root := &qc.Selects[0]
|
root := &qc.Selects[0]
|
||||||
|
|
||||||
io.WriteString(c.w, `WITH `)
|
io.WriteString(c.w, `WITH `)
|
||||||
|
@ -223,7 +223,7 @@ func nestedUpdateOneToOneWithDisconnect(t *testing.T) {
|
|||||||
// }
|
// }
|
||||||
// }`
|
// }`
|
||||||
|
|
||||||
// sql := `WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = 2) RETURNING "products".*) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
|
// sql := `WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = 2) RETURNING "products".*) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
|
||||||
|
|
||||||
// vars := map[string]json.RawMessage{
|
// vars := map[string]json.RawMessage{
|
||||||
// "data": json.RawMessage(`{
|
// "data": json.RawMessage(`{
|
||||||
|
@ -1,13 +0,0 @@
|
|||||||
package psql
|
|
||||||
|
|
||||||
import "regexp"
|
|
||||||
|
|
||||||
func NewVariables(varlist map[string]string) map[string]string {
|
|
||||||
re := regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
|
|
||||||
vars := make(map[string]string, len(varlist))
|
|
||||||
|
|
||||||
for k, v := range varlist {
|
|
||||||
vars[k] = re.ReplaceAllString(v, `{{$1}}`)
|
|
||||||
}
|
|
||||||
return vars
|
|
||||||
}
|
|
@ -1,13 +1,13 @@
|
|||||||
package qcode
|
package qcode
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"regexp"
|
|
||||||
"sort"
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Config struct {
|
type Config struct {
|
||||||
Blocklist []string
|
DefaultBlock bool
|
||||||
|
Blocklist []string
|
||||||
}
|
}
|
||||||
|
|
||||||
type QueryConfig struct {
|
type QueryConfig struct {
|
||||||
@ -46,8 +46,7 @@ type TRConfig struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type trval struct {
|
type trval struct {
|
||||||
readOnly bool
|
query struct {
|
||||||
query struct {
|
|
||||||
limit string
|
limit string
|
||||||
fil *Exp
|
fil *Exp
|
||||||
filNU bool
|
filNU bool
|
||||||
@ -132,12 +131,3 @@ func mapToList(m map[string]string) []string {
|
|||||||
sort.Strings(list)
|
sort.Strings(list)
|
||||||
return list
|
return list
|
||||||
}
|
}
|
||||||
|
|
||||||
var varRe = regexp.MustCompile(`\$([a-zA-Z0-9_]+)`)
|
|
||||||
|
|
||||||
func parsePresets(m map[string]string) map[string]string {
|
|
||||||
for k, v := range m {
|
|
||||||
m[k] = varRe.ReplaceAllString(v, `{{$1}}`)
|
|
||||||
}
|
|
||||||
return m
|
|
||||||
}
|
|
||||||
|
@ -170,9 +170,10 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Compiler struct {
|
type Compiler struct {
|
||||||
db bool // default block tables if not defined in anon role
|
|
||||||
tr map[string]map[string]*trval
|
tr map[string]map[string]*trval
|
||||||
bl map[string]struct{}
|
bl map[string]struct{}
|
||||||
|
|
||||||
|
defBlock bool
|
||||||
}
|
}
|
||||||
|
|
||||||
var expPool = sync.Pool{
|
var expPool = sync.Pool{
|
||||||
@ -180,7 +181,7 @@ var expPool = sync.Pool{
|
|||||||
}
|
}
|
||||||
|
|
||||||
func NewCompiler(c Config) (*Compiler, error) {
|
func NewCompiler(c Config) (*Compiler, error) {
|
||||||
co := &Compiler{}
|
co := &Compiler{defBlock: c.DefaultBlock}
|
||||||
co.tr = make(map[string]map[string]*trval)
|
co.tr = make(map[string]map[string]*trval)
|
||||||
co.bl = make(map[string]struct{}, len(c.Blocklist))
|
co.bl = make(map[string]struct{}, len(c.Blocklist))
|
||||||
|
|
||||||
@ -227,7 +228,7 @@ func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
trv.insert.cols = listToMap(trc.Insert.Columns)
|
trv.insert.cols = listToMap(trc.Insert.Columns)
|
||||||
trv.insert.psmap = parsePresets(trc.Insert.Presets)
|
trv.insert.psmap = trc.Insert.Presets
|
||||||
trv.insert.pslist = mapToList(trv.insert.psmap)
|
trv.insert.pslist = mapToList(trv.insert.psmap)
|
||||||
trv.insert.block = trc.Insert.Block
|
trv.insert.block = trc.Insert.Block
|
||||||
|
|
||||||
@ -237,7 +238,7 @@ func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
trv.update.cols = listToMap(trc.Update.Columns)
|
trv.update.cols = listToMap(trc.Update.Columns)
|
||||||
trv.update.psmap = parsePresets(trc.Update.Presets)
|
trv.update.psmap = trc.Update.Presets
|
||||||
trv.update.pslist = mapToList(trv.update.psmap)
|
trv.update.pslist = mapToList(trv.update.psmap)
|
||||||
trv.update.block = trc.Update.Block
|
trv.update.block = trc.Update.Block
|
||||||
|
|
||||||
@ -333,59 +334,82 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
trv := com.getRole(role, field.Name)
|
trv := com.getRole(role, field.Name)
|
||||||
|
skipRender := false
|
||||||
|
|
||||||
switch action {
|
if trv != nil {
|
||||||
case QTQuery:
|
switch action {
|
||||||
if trv.query.block {
|
case QTQuery:
|
||||||
continue
|
if trv.query.block {
|
||||||
|
skipRender = true
|
||||||
|
}
|
||||||
|
|
||||||
|
case QTInsert:
|
||||||
|
if trv.insert.block {
|
||||||
|
return fmt.Errorf("%s, insert blocked: %s", role, field.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
case QTUpdate:
|
||||||
|
if trv.update.block {
|
||||||
|
return fmt.Errorf("%s, update blocked: %s", role, field.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
case QTDelete:
|
||||||
|
if trv.delete.block {
|
||||||
|
return fmt.Errorf("%s, delete blocked: %s", role, field.Name)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
case QTInsert:
|
} else if role == "anon" {
|
||||||
if trv.insert.block {
|
skipRender = com.defBlock
|
||||||
return fmt.Errorf("insert blocked: %s", field.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
case QTUpdate:
|
|
||||||
if trv.update.block {
|
|
||||||
return fmt.Errorf("update blocked: %s", field.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
case QTDelete:
|
|
||||||
if trv.delete.block {
|
|
||||||
return fmt.Errorf("delete blocked: %s", field.Name)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
selects = append(selects, Select{
|
selects = append(selects, Select{
|
||||||
ID: id,
|
ID: id,
|
||||||
ParentID: parentID,
|
ParentID: parentID,
|
||||||
Name: field.Name,
|
Name: field.Name,
|
||||||
Children: make([]int32, 0, 5),
|
SkipRender: skipRender,
|
||||||
Allowed: trv.allowedColumns(action),
|
|
||||||
Functions: true,
|
|
||||||
})
|
})
|
||||||
s := &selects[(len(selects) - 1)]
|
s := &selects[(len(selects) - 1)]
|
||||||
|
|
||||||
switch action {
|
|
||||||
case QTQuery:
|
|
||||||
s.Functions = !trv.query.disable.funcs
|
|
||||||
s.Paging.Limit = trv.query.limit
|
|
||||||
|
|
||||||
case QTInsert:
|
|
||||||
s.PresetMap = trv.insert.psmap
|
|
||||||
s.PresetList = trv.insert.pslist
|
|
||||||
|
|
||||||
case QTUpdate:
|
|
||||||
s.PresetMap = trv.update.psmap
|
|
||||||
s.PresetList = trv.update.pslist
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(field.Alias) != 0 {
|
if len(field.Alias) != 0 {
|
||||||
s.FieldName = field.Alias
|
s.FieldName = field.Alias
|
||||||
} else {
|
} else {
|
||||||
s.FieldName = s.Name
|
s.FieldName = s.Name
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if s.ParentID == -1 {
|
||||||
|
qc.Roots = append(qc.Roots, s.ID)
|
||||||
|
} else {
|
||||||
|
p := &selects[s.ParentID]
|
||||||
|
p.Children = append(p.Children, s.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
if skipRender {
|
||||||
|
id++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
s.Children = make([]int32, 0, 5)
|
||||||
|
s.Functions = true
|
||||||
|
|
||||||
|
if trv != nil {
|
||||||
|
s.Allowed = trv.allowedColumns(action)
|
||||||
|
|
||||||
|
switch action {
|
||||||
|
case QTQuery:
|
||||||
|
s.Functions = !trv.query.disable.funcs
|
||||||
|
s.Paging.Limit = trv.query.limit
|
||||||
|
|
||||||
|
case QTInsert:
|
||||||
|
s.PresetMap = trv.insert.psmap
|
||||||
|
s.PresetList = trv.insert.pslist
|
||||||
|
|
||||||
|
case QTUpdate:
|
||||||
|
s.PresetMap = trv.update.psmap
|
||||||
|
s.PresetList = trv.update.pslist
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
err := com.compileArgs(qc, s, field.Args, role)
|
err := com.compileArgs(qc, s, field.Args, role)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@ -394,13 +418,6 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
|||||||
// Order is important AddFilters must come after compileArgs
|
// Order is important AddFilters must come after compileArgs
|
||||||
com.AddFilters(qc, s, role)
|
com.AddFilters(qc, s, role)
|
||||||
|
|
||||||
if s.ParentID == -1 {
|
|
||||||
qc.Roots = append(qc.Roots, s.ID)
|
|
||||||
} else {
|
|
||||||
p := &selects[s.ParentID]
|
|
||||||
p.Children = append(p.Children, s.ID)
|
|
||||||
}
|
|
||||||
|
|
||||||
s.Cols = make([]Column, 0, len(field.Children))
|
s.Cols = make([]Column, 0, len(field.Children))
|
||||||
action = QTQuery
|
action = QTQuery
|
||||||
|
|
||||||
@ -440,14 +457,10 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
|||||||
|
|
||||||
func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
|
func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
|
||||||
var fil *Exp
|
var fil *Exp
|
||||||
var nu bool // user required (or not) in this filter
|
var nu bool // need user_id (or not) in this filter
|
||||||
|
|
||||||
if trv, ok := com.tr[role][sel.Name]; ok {
|
if trv, ok := com.tr[role][sel.Name]; ok {
|
||||||
fil, nu = trv.filter(qc.Type)
|
fil, nu = trv.filter(qc.Type)
|
||||||
|
|
||||||
} else if com.db && role == "anon" {
|
|
||||||
// Tables not defined under the anon role will not be rendered
|
|
||||||
sel.SkipRender = true
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if fil == nil {
|
if fil == nil {
|
||||||
@ -838,14 +851,17 @@ func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType)
|
|||||||
return nil, false
|
return nil, false
|
||||||
}
|
}
|
||||||
|
|
||||||
var zeroTrv = &trval{}
|
// var zeroTrv = &trval{}
|
||||||
|
|
||||||
func (com *Compiler) getRole(role, field string) *trval {
|
func (com *Compiler) getRole(role, field string) *trval {
|
||||||
if trv, ok := com.tr[role][field]; ok {
|
if trv, ok := com.tr[role][field]; ok {
|
||||||
return trv
|
return trv
|
||||||
} else {
|
|
||||||
return zeroTrv
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
// } else {
|
||||||
|
// return zeroTrv
|
||||||
|
// }
|
||||||
}
|
}
|
||||||
|
|
||||||
func AddFilter(sel *Select, fil *Exp) {
|
func AddFilter(sel *Select, fil *Exp) {
|
||||||
@ -1015,10 +1031,15 @@ func setListVal(ex *Exp, node *Node) {
|
|||||||
case NodeFloat:
|
case NodeFloat:
|
||||||
ex.ListType = ValFloat
|
ex.ListType = ValFloat
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
ex.Val = node.Val
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
for i := range node.Children {
|
for i := range node.Children {
|
||||||
ex.ListVal = append(ex.ListVal, node.Children[i].Val)
|
ex.ListVal = append(ex.ListVal, node.Children[i].Val)
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func setWhereColName(ex *Exp, node *Node) {
|
func setWhereColName(ex *Exp, node *Node) {
|
||||||
|
@ -11,14 +11,11 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/core/internal/allow"
|
"github.com/dosco/super-graph/core/internal/allow"
|
||||||
"github.com/dosco/super-graph/core/internal/psql"
|
|
||||||
"github.com/dosco/super-graph/core/internal/qcode"
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
"github.com/valyala/fasttemplate"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type preparedItem struct {
|
type preparedItem struct {
|
||||||
sd *sql.Stmt
|
sd *sql.Stmt
|
||||||
args [][]byte
|
|
||||||
st stmt
|
st stmt
|
||||||
roleArg bool
|
roleArg bool
|
||||||
}
|
}
|
||||||
@ -103,9 +100,6 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
|||||||
// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
|
// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
|
||||||
|
|
||||||
stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
|
stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
|
||||||
if err == psql.ErrAllTablesSkipped {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -121,9 +115,6 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
|||||||
// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
|
// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
|
||||||
|
|
||||||
stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
|
stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
|
||||||
if err == psql.ErrAllTablesSkipped {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -139,16 +130,13 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
|
func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
|
||||||
finalSQL, am := processTemplate(st[0].sql)
|
sd, err := sg.db.PrepareContext(ct, st[0].sql)
|
||||||
|
|
||||||
sd, err := sg.db.Prepare(finalSQL)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("prepare failed: %v: %s", err, finalSQL)
|
return fmt.Errorf("prepare failed: %v: %s", err, st[0].sql)
|
||||||
}
|
}
|
||||||
|
|
||||||
sg.prepared[key] = &preparedItem{
|
sg.prepared[key] = &preparedItem{
|
||||||
sd: sd,
|
sd: sd,
|
||||||
args: am,
|
|
||||||
st: st[0],
|
st: st[0],
|
||||||
roleArg: len(st) > 1,
|
roleArg: len(st) > 1,
|
||||||
}
|
}
|
||||||
@ -163,10 +151,11 @@ func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
rq := strings.ReplaceAll(sg.conf.RolesQuery, "$user_id", "$1")
|
||||||
w := &bytes.Buffer{}
|
w := &bytes.Buffer{}
|
||||||
|
|
||||||
io.WriteString(w, `SELECT (CASE WHEN EXISTS (`)
|
io.WriteString(w, `SELECT (CASE WHEN EXISTS (`)
|
||||||
io.WriteString(w, sg.conf.RolesQuery)
|
io.WriteString(w, rq)
|
||||||
io.WriteString(w, `) THEN `)
|
io.WriteString(w, `) THEN `)
|
||||||
|
|
||||||
io.WriteString(w, `(SELECT (CASE`)
|
io.WriteString(w, `(SELECT (CASE`)
|
||||||
@ -181,14 +170,12 @@ func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
|||||||
io.WriteString(w, `'`)
|
io.WriteString(w, `'`)
|
||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(w, ` ELSE {{role}} END) FROM (`)
|
io.WriteString(w, ` ELSE $2 END) FROM (`)
|
||||||
io.WriteString(w, sg.conf.RolesQuery)
|
io.WriteString(w, sg.conf.RolesQuery)
|
||||||
io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
|
io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
|
||||||
io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler" LIMIT 1; `)
|
io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler" LIMIT 1; `)
|
||||||
|
|
||||||
roleSQL, _ := processTemplate(w.String())
|
sg.getRole, err = tx.Prepare(w.String())
|
||||||
|
|
||||||
sg.getRole, err = tx.Prepare(roleSQL)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -196,36 +183,6 @@ func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func processTemplate(tmpl string) (string, [][]byte) {
|
|
||||||
st := struct {
|
|
||||||
vmap map[string]int
|
|
||||||
am [][]byte
|
|
||||||
i int
|
|
||||||
}{
|
|
||||||
vmap: make(map[string]int),
|
|
||||||
am: make([][]byte, 0, 5),
|
|
||||||
i: 0,
|
|
||||||
}
|
|
||||||
|
|
||||||
execFunc := func(w io.Writer, tag string) (int, error) {
|
|
||||||
if n, ok := st.vmap[tag]; ok {
|
|
||||||
return w.Write([]byte(fmt.Sprintf("$%d", n)))
|
|
||||||
}
|
|
||||||
st.am = append(st.am, []byte(tag))
|
|
||||||
st.i++
|
|
||||||
st.vmap[tag] = st.i
|
|
||||||
return w.Write([]byte(fmt.Sprintf("$%d", st.i)))
|
|
||||||
}
|
|
||||||
|
|
||||||
t1 := fasttemplate.New(tmpl, `'{{`, `}}'`)
|
|
||||||
ts1 := t1.ExecuteFuncString(execFunc)
|
|
||||||
|
|
||||||
t2 := fasttemplate.New(ts1, `{{`, `}}`)
|
|
||||||
ts2 := t2.ExecuteFuncString(execFunc)
|
|
||||||
|
|
||||||
return ts2, st.am
|
|
||||||
}
|
|
||||||
|
|
||||||
func (sg *SuperGraph) initAllowList() error {
|
func (sg *SuperGraph) initAllowList() error {
|
||||||
var ac allow.Config
|
var ac allow.Config
|
||||||
var err error
|
var err error
|
||||||
|
@ -21,7 +21,7 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
|
|||||||
// fetch the field name used within the db response json
|
// fetch the field name used within the db response json
|
||||||
// that are used to mark insertion points and the mapping between
|
// that are used to mark insertion points and the mapping between
|
||||||
// those field names and their select objects
|
// those field names and their select objects
|
||||||
fids, sfmap := sg.parentFieldIds(h, sel, st.skipped)
|
fids, sfmap := sg.parentFieldIds(h, sel, st.md.Skipped)
|
||||||
|
|
||||||
// fetch the field values of the marked insertion points
|
// fetch the field values of the marked insertion points
|
||||||
// these values contain the id to be used with fetching remote data
|
// these values contain the id to be used with fetching remote data
|
||||||
|
@ -1,40 +0,0 @@
|
|||||||
01a106d5.06939d67.js,1589776216137,2e1ce67f6cf79a8a8e2070fc4ea4a6104ac73a5b26a1ab10b62f6cd8e45a8074
|
|
||||||
1.1c32171f.js.LICENSE.txt,1589776216144,31b4d50dbbd144da150dcdcf0ccef8f6cf8b6b5204d5c9adde3b24466777fad5
|
|
||||||
0e384e19.7f29b403.js,1589776216137,e2c3882226f2a601b65e4bb1fdb771296c1946f9f125c90af4a8f451dfd2c867
|
|
||||||
19.fdfbe826.js.LICENSE.txt,1589776216145,6ad95a8099127a8d42b5ace6d148064b1d3e922174f08d75d0ee2220ebeacd0b
|
|
||||||
17896441.183211f5.js,1589776216137,7736db62d7498a8d3a10a617b1bdfac08c8f29dc03329f4ad3320f2571c223c0
|
|
||||||
20ac7829.c04b4a1e.js,1589776216137,5b95f479848ccd6959630d4a24bd551d0dbc74457911e9b6f3498655bfaf8ea7
|
|
||||||
1.1c32171f.js,1589776216137,5441b74bfad9f5a37ba0e6123621c73c3e3b9064bda6b9dcf62fdb7381bf8e41
|
|
||||||
2.8f12478f.js,1589776216137,3ac7ca0df8fca86145f5decbd86c8adfbc6b5b11a5be96fc96cc9bc33d6306e6
|
|
||||||
395f47e2.28d67f37.js,1589776216137,8a9b6bc2afdd99ca2b1827c8289352fab6163e30151b9701c29a7863b6cd00b6
|
|
||||||
404.html,1589776218438,0a748eaa7614b1982623360ba8554c0f498b0796ead3cc429a2c84d287084b50
|
|
||||||
3d9c95a4.c89589de.js,1589776216137,d5c45e5a3671f303683451d448e2e5d5b464f041cde683af6e824b9e7f951412
|
|
||||||
9225b3a9.a5e6036b.js,1589776216137,ec9a0d4b34d8751f74348d0da369625a18f320c9ed5ab3c5ccf047ead2551bd8
|
|
||||||
741df2ae.e13b96b2.js,1589776216137,12028f0cbdf783ac91ea42db64d91190ebd4df24cc74162f953aacc75d16d078
|
|
||||||
969d212d.9fc45877.js,1589776216138,8323c9f2db042bfaa2ebba43d9500bed881a694d0bfc27fd796cec95bb032dc5
|
|
||||||
c4f5d8e4.47e70b85.js,1589776216145,6f986b48720724e7c8a715812b5f6625c71c8eca258bb4b410a447eb5da52734
|
|
||||||
index.html,1589776218438,89f81ec3d3be439a827bd61448dcaddb71c33422df7baa88a7bbcdf784dbc0b2
|
|
||||||
98ce8162.b5ace15d.js,1589776216137,935e1c6dd08f7e9d0d00221559b95f0f649e28ddf64be6bbb7b3e65bae1aba72
|
|
||||||
main.e30d99cd.js.LICENSE.txt,1589776216144,1d906c3b83eacffe298d21eeb73e6e73e96310983224783d236195098e6765a7
|
|
||||||
runtime~main.366c29ad.js,1589776216145,0e550cc9522cd99c5fa4097c7db629eef56127a7f8ade0b7c9954cc8f6a01239
|
|
||||||
5043549d.62508ecf.js,1589776216137,383959b80d2b0c6416e83c9640ea03c666fe92c407e13a6f022b58072feeafd2
|
|
||||||
99e04881.197dcef6.js,1589776216144,af99883cbd4d58fbac7cbf814be33032b77bc8daf856aed54bdf0bf27ed5708d
|
|
||||||
sitemap.xml,1589776218455,660ed269bf0306ba47ecdfb638e487147784d614c43c6c4a8e84194973baf183
|
|
||||||
styles.9155f1d2.js,1589776216137,f1e0863928710e67338dc88c37f47ef3ff164d36c4bba40d005561094c9c3284
|
|
||||||
db32d859.a032827a.js,1589776216145,36d575ffad747898726a97cb7a3551e636f744218595bea5c060536eb8d8390f
|
|
||||||
docs/advanced/index.html,1589776218439,31171870786a597597de9417978a27253581c013962e39959ae4c0777bf86c28
|
|
||||||
docs/deploy/index.html,1589776218440,7a4735edb93006311b704e62b843bf89bc4354fdf0fdc22a0c5802e39878c193
|
|
||||||
docs/home/index.html,1589776218440,c7fbb0c1084c6ef8858775c5083b6b416b8188942d4402a5a625eadb3bc00942
|
|
||||||
docs/intro/index.html,1589776218440,c7a50ae98c0b279f422e55c2eeb9f7ba1c7c1a8bcac07be11fd6e05ced224094
|
|
||||||
img/super-graph-logo.svg,1589776218438,66a865c4936f44ea811464b967f221b615b7553e85dca0d6f1ef620da3911857
|
|
||||||
docs/react/index.html,1589776218440,f76fc976f3491d9aacf19ce3b34bee1339f87c673a9da95f192683615618f210
|
|
||||||
docs/why/index.html,1589776218440,4aa380fe4e5d8476645e368d1f708d5d1344331c572383db823c3499fa0c99cc
|
|
||||||
docs/security/index.html,1589776218440,0c7d466dc143935db8c02a448952cae2465635e4b6782b1682449bbd56807917
|
|
||||||
styles.8ee0cad4.css,1589776216137,34b2e79c5c5b1f7afda4376e422e8ccb2c3c04213ca09d788f0c68ecf153d6e6
|
|
||||||
docs/config/index.html,1589776218440,25b6e87a42c163ac966e80acebca8708f56ae95ba8f3ed8b98ff7fd70ca5a222
|
|
||||||
docs/internals/index.html,1589776218440,b6f2136a1c832f421a46329fb1f39269d820c55a0dfc9351848271a5501d8e6e
|
|
||||||
docs/start/index.html,1589776218440,485ec2c61117d8940d8028f34d51d421995a814d5b9d4d5a1870adaed48aec2c
|
|
||||||
docs/graphql/index.html,1589776218440,3bd79f703fe67656884f3121bfddc3a4fc4d9e5bb2bf9271c94014058fbbd806
|
|
||||||
main.e30d99cd.js,1589776216144,98a4087d6f537aaddbc1225aaabfb4d12d1394772deb618d4d457685cee59311
|
|
||||||
19.fdfbe826.js,1589776216144,b8abb73aea5fc0aa50d7e8b8bd38984e3b3aec62de2faf66fb3e55fd1428f8a7
|
|
||||||
server.bundle.js,1589776218438,826db37f1de931e8b088c1ff20b4a3c2fe0c3d54d9ff4020e500f0df1b83a616
|
|
@ -157,7 +157,9 @@ func main() {
|
|||||||
}
|
}
|
||||||
}`
|
}`
|
||||||
|
|
||||||
res, err := sg.GraphQL(context.Background(), query, nil)
|
ctx = context.WithValue(ctx, core.UserIDKey, 1)
|
||||||
|
|
||||||
|
res, err := sg.GraphQL(ctx, query, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
|
@ -25,6 +25,8 @@ Super Graph has a rich feature set like integrating with your existing Ruby on R
|
|||||||
- Fuzz tested for security
|
- Fuzz tested for security
|
||||||
- Database migrations tool
|
- Database migrations tool
|
||||||
- Database seeding tool
|
- Database seeding tool
|
||||||
|
- Works with Postgres and Yugabyte DB
|
||||||
|
- OpenCensus Support: Zipkin, Prometheus, X-Ray, Stackdriver
|
||||||
|
|
||||||
## Try the demo app
|
## Try the demo app
|
||||||
|
|
||||||
|
249
docs/website/docs/seed.md
Normal file
249
docs/website/docs/seed.md
Normal file
@ -0,0 +1,249 @@
|
|||||||
|
---
|
||||||
|
id: seed
|
||||||
|
title: Database Seeding
|
||||||
|
sidebar_label: Seed Scripts
|
||||||
|
---
|
||||||
|
|
||||||
|
While developing it's often useful to be able to have fake data available in the database. Fake data can help with building the UI and save you time when trying to get the GraphQL query correct. Super Graph has the ability do this for you. All you have to do is write a seed script `config/seed.js` (In Javascript) and use the `db:seed` command line option. Below is an example of kind of things you can do in a seed script.
|
||||||
|
|
||||||
|
## Creating fake users
|
||||||
|
|
||||||
|
Since all mutations and queries are in standard GraphQL you can use all the features available in Super Graph GraphQL.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var users = [];
|
||||||
|
|
||||||
|
for (i = 0; i < 20; i++) {
|
||||||
|
var data = {
|
||||||
|
slug: util.make_slug(fake.first_name() + "-" + fake.last_name()),
|
||||||
|
first_name: fake.first_name(),
|
||||||
|
last_name: fake.last_name(),
|
||||||
|
picture_url: fake.avatar_url(),
|
||||||
|
email: fake.email(),
|
||||||
|
bio: fake.sentence(10),
|
||||||
|
};
|
||||||
|
|
||||||
|
var res = graphql(" \
|
||||||
|
mutation { \
|
||||||
|
user(insert: $data) { \
|
||||||
|
id \
|
||||||
|
} \
|
||||||
|
}", { data: data });
|
||||||
|
|
||||||
|
users.push(res.user);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Inserting the users fake blog posts
|
||||||
|
|
||||||
|
Another example highlighting how the `connect` syntax of Super Graph GraphQL can be used to connect inserted posts
|
||||||
|
to random users that were previously created. For futher details checkout the [seed script](/seed) documentation.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var posts = [];
|
||||||
|
|
||||||
|
for (i = 0; i < 1500; i++) {
|
||||||
|
var user.id = users[Math.floor(Math.random() * 10)];
|
||||||
|
|
||||||
|
var data = {
|
||||||
|
slug: util.make_slug(fake.sentence(3) + i),
|
||||||
|
body: fake.sentence(100),
|
||||||
|
published: true,
|
||||||
|
thread: {
|
||||||
|
connect: { user: user.id }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var res = graphql(" \
|
||||||
|
mutation { \
|
||||||
|
post(insert: $data) { \
|
||||||
|
id \
|
||||||
|
} \
|
||||||
|
}",
|
||||||
|
{ data: data },
|
||||||
|
{ user_id: u.id })
|
||||||
|
|
||||||
|
posts.push(res.post.slug)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Insert a large number of rows efficiently
|
||||||
|
|
||||||
|
This feature uses the `COPY` functionality available in Postgres this is the best way to
|
||||||
|
insert a large number of rows into a table. The `import_csv` function reads in a CSV file using the first
|
||||||
|
line of the file as column names.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
import_csv("post_tags", "./tags.csv");
|
||||||
|
```
|
||||||
|
|
||||||
|
## A list of fake data functions available to you.
|
||||||
|
|
||||||
|
```
|
||||||
|
person
|
||||||
|
name
|
||||||
|
name_prefix
|
||||||
|
name_suffix
|
||||||
|
first_name
|
||||||
|
last_name
|
||||||
|
gender
|
||||||
|
ssn
|
||||||
|
contact
|
||||||
|
email
|
||||||
|
phone
|
||||||
|
phone_formatted
|
||||||
|
username
|
||||||
|
password
|
||||||
|
|
||||||
|
// Address
|
||||||
|
address
|
||||||
|
city
|
||||||
|
country
|
||||||
|
country_abr
|
||||||
|
state
|
||||||
|
state_abr
|
||||||
|
street
|
||||||
|
street_name
|
||||||
|
street_number
|
||||||
|
street_prefix
|
||||||
|
street_suffix
|
||||||
|
zip
|
||||||
|
latitude
|
||||||
|
latitude_in_range
|
||||||
|
longitude
|
||||||
|
longitude_in_range
|
||||||
|
|
||||||
|
// Beer
|
||||||
|
beer_alcohol
|
||||||
|
beer_hop
|
||||||
|
beer_ibu
|
||||||
|
beer_blg
|
||||||
|
beer_malt
|
||||||
|
beer_name
|
||||||
|
beer_style
|
||||||
|
beer_yeast
|
||||||
|
|
||||||
|
// Cars
|
||||||
|
car
|
||||||
|
car_type
|
||||||
|
car_maker
|
||||||
|
car_model
|
||||||
|
|
||||||
|
// Text
|
||||||
|
word
|
||||||
|
sentence
|
||||||
|
paragraph
|
||||||
|
question
|
||||||
|
quote
|
||||||
|
|
||||||
|
// Misc
|
||||||
|
generate
|
||||||
|
boolean
|
||||||
|
uuid
|
||||||
|
|
||||||
|
// Colors
|
||||||
|
color
|
||||||
|
hex_color
|
||||||
|
rgb_color
|
||||||
|
safe_color
|
||||||
|
|
||||||
|
// Internet
|
||||||
|
url
|
||||||
|
image_url
|
||||||
|
avatar_url
|
||||||
|
domain_name
|
||||||
|
domain_suffix
|
||||||
|
ipv4_address
|
||||||
|
ipv6_address
|
||||||
|
http_method
|
||||||
|
user_agent
|
||||||
|
user_agent_firefox
|
||||||
|
user_agent_chrome
|
||||||
|
user_agent_opera
|
||||||
|
user_agent_safari
|
||||||
|
|
||||||
|
// Date / Time
|
||||||
|
date
|
||||||
|
date_range
|
||||||
|
nano_second
|
||||||
|
second
|
||||||
|
minute
|
||||||
|
hour
|
||||||
|
month
|
||||||
|
day
|
||||||
|
weekday
|
||||||
|
year
|
||||||
|
timezone
|
||||||
|
timezone_abv
|
||||||
|
timezone_full
|
||||||
|
timezone_offset
|
||||||
|
|
||||||
|
// Payment
|
||||||
|
price
|
||||||
|
credit_card
|
||||||
|
credit_card_cvv
|
||||||
|
credit_card_number
|
||||||
|
credit_card_type
|
||||||
|
currency
|
||||||
|
currency_long
|
||||||
|
currency_short
|
||||||
|
|
||||||
|
// Company
|
||||||
|
bs
|
||||||
|
buzzword
|
||||||
|
company
|
||||||
|
company_suffix
|
||||||
|
job
|
||||||
|
job_description
|
||||||
|
job_level
|
||||||
|
job_title
|
||||||
|
|
||||||
|
// Hacker
|
||||||
|
hacker_abbreviation
|
||||||
|
hacker_adjective
|
||||||
|
hacker_noun
|
||||||
|
hacker_phrase
|
||||||
|
hacker_verb
|
||||||
|
|
||||||
|
//Hipster
|
||||||
|
hipster_word
|
||||||
|
hipster_paragraph
|
||||||
|
hipster_sentence
|
||||||
|
|
||||||
|
// File
|
||||||
|
file_extension
|
||||||
|
file_mine_type
|
||||||
|
|
||||||
|
// Numbers
|
||||||
|
number
|
||||||
|
numerify
|
||||||
|
int8
|
||||||
|
int16
|
||||||
|
int32
|
||||||
|
int64
|
||||||
|
uint8
|
||||||
|
uint16
|
||||||
|
uint32
|
||||||
|
uint64
|
||||||
|
float32
|
||||||
|
float32_range
|
||||||
|
float64
|
||||||
|
float64_range
|
||||||
|
shuffle_ints
|
||||||
|
mac_address
|
||||||
|
|
||||||
|
// String
|
||||||
|
digit
|
||||||
|
letter
|
||||||
|
lexify
|
||||||
|
rand_string
|
||||||
|
numerify
|
||||||
|
```
|
||||||
|
|
||||||
|
## Some more utility functions
|
||||||
|
|
||||||
|
```
|
||||||
|
shuffle_strings(string_array)
|
||||||
|
make_slug(text)
|
||||||
|
make_slug_lang(text, lang)
|
||||||
|
```
|
@ -10,7 +10,7 @@ You can then add your database schema to the migrations, maybe create some seed
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Download and install Super Graph. You will need Go 1.14 or above
|
# Download and install Super Graph. You will need Go 1.14 or above
|
||||||
go get https://github.com/dosco/super-graph
|
go get github.com/dosco/super-graph
|
||||||
```
|
```
|
||||||
|
|
||||||
And then create and launch your new app
|
And then create and launch your new app
|
||||||
@ -96,179 +96,6 @@ var post_count = import_csv("posts", "posts.csv");
|
|||||||
|
|
||||||
You can generate the following fake data for your seeding purposes. Below is the list of fake data functions supported by the built-in fake data library. For example `fake.image_url()` will generate a fake image url or `fake.shuffle_strings(['hello', 'world', 'cool'])` will generate a randomly shuffled version of that array of strings or `fake.rand_string(['hello', 'world', 'cool'])` will return a random string from the array provided.
|
You can generate the following fake data for your seeding purposes. Below is the list of fake data functions supported by the built-in fake data library. For example `fake.image_url()` will generate a fake image url or `fake.shuffle_strings(['hello', 'world', 'cool'])` will generate a randomly shuffled version of that array of strings or `fake.rand_string(['hello', 'world', 'cool'])` will return a random string from the array provided.
|
||||||
|
|
||||||
```
|
|
||||||
// Person
|
|
||||||
person
|
|
||||||
name
|
|
||||||
name_prefix
|
|
||||||
name_suffix
|
|
||||||
first_name
|
|
||||||
last_name
|
|
||||||
gender
|
|
||||||
ssn
|
|
||||||
contact
|
|
||||||
email
|
|
||||||
phone
|
|
||||||
phone_formatted
|
|
||||||
username
|
|
||||||
password
|
|
||||||
|
|
||||||
// Address
|
|
||||||
address
|
|
||||||
city
|
|
||||||
country
|
|
||||||
country_abr
|
|
||||||
state
|
|
||||||
state_abr
|
|
||||||
status_code
|
|
||||||
street
|
|
||||||
street_name
|
|
||||||
street_number
|
|
||||||
street_prefix
|
|
||||||
street_suffix
|
|
||||||
zip
|
|
||||||
latitude
|
|
||||||
latitude_in_range
|
|
||||||
longitude
|
|
||||||
longitude_in_range
|
|
||||||
|
|
||||||
// Beer
|
|
||||||
beer_alcohol
|
|
||||||
beer_hop
|
|
||||||
beer_ibu
|
|
||||||
beer_blg
|
|
||||||
beer_malt
|
|
||||||
beer_name
|
|
||||||
beer_style
|
|
||||||
beer_yeast
|
|
||||||
|
|
||||||
// Cars
|
|
||||||
car
|
|
||||||
car_type
|
|
||||||
car_maker
|
|
||||||
car_model
|
|
||||||
|
|
||||||
// Text
|
|
||||||
word
|
|
||||||
sentence
|
|
||||||
paragraph
|
|
||||||
question
|
|
||||||
quote
|
|
||||||
|
|
||||||
// Misc
|
|
||||||
generate
|
|
||||||
boolean
|
|
||||||
uuid
|
|
||||||
|
|
||||||
// Colors
|
|
||||||
color
|
|
||||||
hex_color
|
|
||||||
rgb_color
|
|
||||||
safe_color
|
|
||||||
|
|
||||||
// Internet
|
|
||||||
url
|
|
||||||
image_url
|
|
||||||
domain_name
|
|
||||||
domain_suffix
|
|
||||||
ipv4_address
|
|
||||||
ipv6_address
|
|
||||||
simple_status_code
|
|
||||||
http_method
|
|
||||||
user_agent
|
|
||||||
user_agent_firefox
|
|
||||||
user_agent_chrome
|
|
||||||
user_agent_opera
|
|
||||||
user_agent_safari
|
|
||||||
|
|
||||||
// Date / Time
|
|
||||||
date
|
|
||||||
date_range
|
|
||||||
nano_second
|
|
||||||
second
|
|
||||||
minute
|
|
||||||
hour
|
|
||||||
month
|
|
||||||
day
|
|
||||||
weekday
|
|
||||||
year
|
|
||||||
timezone
|
|
||||||
timezone_abv
|
|
||||||
timezone_full
|
|
||||||
timezone_offset
|
|
||||||
|
|
||||||
// Payment
|
|
||||||
price
|
|
||||||
credit_card
|
|
||||||
credit_card_cvv
|
|
||||||
credit_card_number
|
|
||||||
credit_card_number_luhn
|
|
||||||
credit_card_type
|
|
||||||
currency
|
|
||||||
currency_long
|
|
||||||
currency_short
|
|
||||||
|
|
||||||
// Company
|
|
||||||
bs
|
|
||||||
buzzword
|
|
||||||
company
|
|
||||||
company_suffix
|
|
||||||
job
|
|
||||||
job_description
|
|
||||||
job_level
|
|
||||||
job_title
|
|
||||||
|
|
||||||
// Hacker
|
|
||||||
hacker_abbreviation
|
|
||||||
hacker_adjective
|
|
||||||
hacker_ingverb
|
|
||||||
hacker_noun
|
|
||||||
hacker_phrase
|
|
||||||
hacker_verb
|
|
||||||
|
|
||||||
//Hipster
|
|
||||||
hipster_word
|
|
||||||
hipster_paragraph
|
|
||||||
hipster_sentence
|
|
||||||
|
|
||||||
// File
|
|
||||||
file_extension
|
|
||||||
file_mine_type
|
|
||||||
|
|
||||||
// Numbers
|
|
||||||
number
|
|
||||||
numerify
|
|
||||||
int8
|
|
||||||
int16
|
|
||||||
int32
|
|
||||||
int64
|
|
||||||
uint8
|
|
||||||
uint16
|
|
||||||
uint32
|
|
||||||
uint64
|
|
||||||
float32
|
|
||||||
float32_range
|
|
||||||
float64
|
|
||||||
float64_range
|
|
||||||
shuffle_ints
|
|
||||||
mac_address
|
|
||||||
|
|
||||||
//String
|
|
||||||
digit
|
|
||||||
letter
|
|
||||||
lexify
|
|
||||||
shuffle_strings
|
|
||||||
numerify
|
|
||||||
```
|
|
||||||
|
|
||||||
Other utility functions
|
|
||||||
|
|
||||||
```
|
|
||||||
shuffle_strings(string_array)
|
|
||||||
make_slug(text)
|
|
||||||
make_slug_lang(text, lang)
|
|
||||||
```
|
|
||||||
|
|
||||||
### Migrations
|
### Migrations
|
||||||
|
|
||||||
Easy database migrations are the most important thing when building products backed by a relational database. We make it super easy to manage and migrate your database.
|
Easy database migrations are the most important thing when building products backed by a relational database. We make it super easy to manage and migrate your database.
|
||||||
|
82
docs/website/docs/telemetry.md
Normal file
82
docs/website/docs/telemetry.md
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
---
|
||||||
|
id: telemetry
|
||||||
|
title: Tracing and Metrics
|
||||||
|
sidebar_label: Telemetry
|
||||||
|
---
|
||||||
|
|
||||||
|
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
|
||||||
|
|
||||||
|
Having observability and telemetry is at the core of any production ready service. Super Graph has built-in support for OpenCensus for tracing requests all the way from HTTP to the database and providing all kinds of metrics.
|
||||||
|
|
||||||
|
OpenCensus has a concept called exporters; these are external services that can consume this data and give you graphs, charts, alerting, etc. Super Graph again has built-in support for the Zipkin, Prometheus, Google Stackdriver and AWS X-Ray exporters.
|
||||||
|
|
||||||
|
## Telemetry config
|
||||||
|
|
||||||
|
The `telemetry` section of the standard config files is where you set values to configure this feature to your needs.
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
telemetry:
|
||||||
|
debug: true
|
||||||
|
interval: 5s
|
||||||
|
metrics:
|
||||||
|
exporter: "prometheus"
|
||||||
|
endpoint: ""
|
||||||
|
namespace: "web api"
|
||||||
|
key: "1234xyz"
|
||||||
|
tracing:
|
||||||
|
exporter: "zipkin"
|
||||||
|
endpoint: "http://zipkin:9411/api/v2/spans"
|
||||||
|
sample: 0.2
|
||||||
|
include_query: false
|
||||||
|
include_params: false
|
||||||
|
```
|
||||||
|
|
||||||
|
**debug**: Enabling debug enables an embedded web UI to test and debug tracing and metrics. This UI, called `zPages`, is provided by OpenCensus and will be made available on the `/telemetry` path. For more information on using `zPages` see https://opencensus.io/zpages/. Remember to disable this in production.
|
||||||
|
|
||||||
|
**interval**: This controls the interval setting for OpenCensus metrics collection. This defaults to `5 seconds` if not set.
|
||||||
|
|
||||||
|
**metrics.exporter** Setting this enables metrics collection. The supported values for this field are `prometheus` and `stackdriver`. The Prometheus exporter requires `metrics.namespace` to be set. The Stackdriver exporter requires `metrics.key` to be set to the Google Cloud Project ID.
|
||||||
|
|
||||||
|
**metrics.endpoint** This field is not currently used by any of the exporters.
|
||||||
|
|
||||||
|
**tracing.exporter** Setting this enables request tracing. The supported values for this field are `zipkin`, `aws` and `xray`. Zipkin requires `tracing.endpoint` to be set. AWS and X-Ray are the same and do not require any additional settings.
|
||||||
|
|
||||||
|
**tracing.sample** This controls what percentage of the requests should be traced. By default `0.5` or 50% of the requests are traced, `always` is also a valid value for this field and it means all requests will be traced.
|
||||||
|
|
||||||
|
**include_query** Include the Super Graph SQL query in the trace. Be careful with this setting in production; it will add the entire SQL query to the trace. This can be very useful to debug slow requests.
|
||||||
|
|
||||||
|
**include_params** Include the Super Graph SQL query parameters in the trace. Be careful with this setting in production; it can potentially leak sensitive user information into tracing logs.
|
||||||
|
|
||||||
|
## Using Zipkin
|
||||||
|
|
||||||
|
Zipkin is a really great open source request tracing project. It's easy to add to your current Super Graph app as a way to test tracing in development. Add the following to the Super Graph generated `docker-compose.yml` file. Also add `zipkin` to your current app's `depends_on` list. Once set up, the Zipkin UI is available at http://localhost:9411
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
your_api:
|
||||||
|
...
|
||||||
|
depends_on:
|
||||||
|
- db
|
||||||
|
- zipkin
|
||||||
|
|
||||||
|
zipkin:
|
||||||
|
image: openzipkin/zipkin-slim
|
||||||
|
container_name: zipkin
|
||||||
|
# Environment settings are defined here https://github.com/openzipkin/zipkin/blob/master/zipkin-server/README.md#environment-variables
|
||||||
|
environment:
|
||||||
|
- STORAGE_TYPE=mem
|
||||||
|
# Uncomment to enable self-tracing
|
||||||
|
# - SELF_TRACING_ENABLED=true
|
||||||
|
# Uncomment to enable debug logging
|
||||||
|
# - JAVA_OPTS=-Dorg.slf4j.simpleLogger.log.zipkin2=debug
|
||||||
|
ports:
|
||||||
|
# Port used for the Zipkin UI and HTTP Api
|
||||||
|
- 9411:9411
|
||||||
|
```
|
||||||
|
|
||||||
|
### Zipkin HTTP to DB traces
|
||||||
|
|
||||||
|
<img alt="Zipkin Traces" src={useBaseUrl("img/zipkin1.png")} />
|
||||||
|
|
||||||
|
### Zipkin trace details
|
||||||
|
|
||||||
|
<img alt="Zipkin Traces" src={useBaseUrl('img/zipkin2.png')} />
|
@ -9,7 +9,9 @@ module.exports = {
|
|||||||
"react",
|
"react",
|
||||||
"advanced",
|
"advanced",
|
||||||
"security",
|
"security",
|
||||||
|
"telemetry",
|
||||||
"config",
|
"config",
|
||||||
|
"seed",
|
||||||
"deploy",
|
"deploy",
|
||||||
"internals",
|
"internals",
|
||||||
],
|
],
|
||||||
|
BIN
docs/website/static/img/zipkin1.png
Normal file
BIN
docs/website/static/img/zipkin1.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 112 KiB |
BIN
docs/website/static/img/zipkin2.png
Normal file
BIN
docs/website/static/img/zipkin2.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 141 KiB |
3
go.mod
3
go.mod
@ -37,12 +37,11 @@ require (
|
|||||||
github.com/spf13/pflag v1.0.5 // indirect
|
github.com/spf13/pflag v1.0.5 // indirect
|
||||||
github.com/spf13/viper v1.6.3
|
github.com/spf13/viper v1.6.3
|
||||||
github.com/stretchr/testify v1.5.1
|
github.com/stretchr/testify v1.5.1
|
||||||
github.com/valyala/fasttemplate v1.1.0
|
|
||||||
go.opencensus.io v0.22.3
|
go.opencensus.io v0.22.3
|
||||||
go.uber.org/zap v1.14.1
|
go.uber.org/zap v1.14.1
|
||||||
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904
|
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904
|
||||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 // indirect
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543
|
||||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
|
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
|
||||||
gopkg.in/ini.v1 v1.55.0 // indirect
|
gopkg.in/ini.v1 v1.55.0 // indirect
|
||||||
)
|
)
|
||||||
|
6
go.sum
6
go.sum
@ -113,6 +113,7 @@ github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFG
|
|||||||
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
||||||
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
||||||
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
|
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
|
||||||
|
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
|
||||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||||
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||||
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6 h1:ZgQEtGgCBiWRM39fZuwSd1LwSqqSW0hOdXCYYDX0R3I=
|
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6 h1:ZgQEtGgCBiWRM39fZuwSd1LwSqqSW0hOdXCYYDX0R3I=
|
||||||
@ -131,6 +132,7 @@ github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ
|
|||||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||||
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
|
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
|
||||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||||
|
github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
|
||||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
||||||
@ -142,7 +144,9 @@ github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+
|
|||||||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
|
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
|
||||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||||
|
github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=
|
||||||
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
|
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
|
||||||
|
github.com/gorilla/mux v1.6.2 h1:Pgr17XVTNXAk3q/r4CpKzC5xBM/qW1uVLV+IhRZpIIk=
|
||||||
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
|
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
|
||||||
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
|
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
|
||||||
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
|
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
|
||||||
@ -369,8 +373,6 @@ github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6Kllzaw
|
|||||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||||
github.com/valyala/fasttemplate v1.0.1 h1:tY9CJiPnMXf1ERmG2EyK7gNUd+c6RKGD0IfU8WdUSz8=
|
github.com/valyala/fasttemplate v1.0.1 h1:tY9CJiPnMXf1ERmG2EyK7gNUd+c6RKGD0IfU8WdUSz8=
|
||||||
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
|
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
|
||||||
github.com/valyala/fasttemplate v1.1.0 h1:RZqt0yGBsps8NGvLSGW804QQqCUYYLsaOjTVHy1Ocw4=
|
|
||||||
github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
|
|
||||||
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
|
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
|
||||||
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
|
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
|
||||||
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
|
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
|
||||||
|
@ -62,9 +62,11 @@ type Serv struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
Tracing struct {
|
Tracing struct {
|
||||||
Exporter string
|
Exporter string
|
||||||
Endpoint string
|
Endpoint string
|
||||||
Sample string
|
Sample string
|
||||||
|
IncludeQuery bool `mapstructure:"include_query"`
|
||||||
|
IncludeParams bool `mapstructure:"include_params"`
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -298,9 +298,9 @@ func ExtractErrorLine(source string, position int) (ErrorLineExtract, error) {
|
|||||||
|
|
||||||
func getMigrationVars() map[string]interface{} {
|
func getMigrationVars() map[string]interface{} {
|
||||||
return map[string]interface{}{
|
return map[string]interface{}{
|
||||||
"app_name": strings.Title(conf.AppName),
|
"AppName": strings.Title(conf.AppName),
|
||||||
"app_name_slug": strings.ToLower(strings.Replace(conf.AppName, " ", "_", -1)),
|
"AppNameSlug": strings.ToLower(strings.Replace(conf.AppName, " ", "_", -1)),
|
||||||
"env": strings.ToLower(os.Getenv("GO_ENV")),
|
"Env": strings.ToLower(os.Getenv("GO_ENV")),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2,8 +2,7 @@ package serv
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"fmt"
|
"html/template"
|
||||||
"io"
|
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"os"
|
"os"
|
||||||
"path"
|
"path"
|
||||||
@ -11,7 +10,6 @@ import (
|
|||||||
|
|
||||||
rice "github.com/GeertJohan/go.rice"
|
rice "github.com/GeertJohan/go.rice"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
"github.com/valyala/fasttemplate"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func cmdNew(cmd *cobra.Command, args []string) {
|
func cmdNew(cmd *cobra.Command, args []string) {
|
||||||
@ -21,8 +19,8 @@ func cmdNew(cmd *cobra.Command, args []string) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
tmpl := newTempl(map[string]string{
|
tmpl := newTempl(map[string]string{
|
||||||
"app_name": strings.Title(strings.Join(args, " ")),
|
"AppName": strings.Title(strings.Join(args, " ")),
|
||||||
"app_name_slug": strings.ToLower(strings.Join(args, "_")),
|
"AppNameSlug": strings.ToLower(strings.Join(args, "_")),
|
||||||
})
|
})
|
||||||
|
|
||||||
// Create app folder and add relevant files
|
// Create app folder and add relevant files
|
||||||
@ -121,19 +119,16 @@ func newTempl(data map[string]string) *Templ {
|
|||||||
func (t *Templ) get(name string) ([]byte, error) {
|
func (t *Templ) get(name string) ([]byte, error) {
|
||||||
v := t.MustString(name)
|
v := t.MustString(name)
|
||||||
b := bytes.Buffer{}
|
b := bytes.Buffer{}
|
||||||
tmpl := fasttemplate.New(v, "{%", "%}")
|
|
||||||
|
|
||||||
_, err := tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) {
|
|
||||||
if val, ok := t.data[strings.TrimSpace(tag)]; ok {
|
|
||||||
return w.Write([]byte(val))
|
|
||||||
}
|
|
||||||
return 0, fmt.Errorf("unknown template variable '%s'", tag)
|
|
||||||
})
|
|
||||||
|
|
||||||
|
tmpl, err := template.New(name).Parse(v)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if err := tmpl.Execute(&b, t.data); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
return b.Bytes(), nil
|
return b.Bytes(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -17,6 +17,7 @@ import (
|
|||||||
"github.com/dop251/goja"
|
"github.com/dop251/goja"
|
||||||
"github.com/dosco/super-graph/core"
|
"github.com/dosco/super-graph/core"
|
||||||
"github.com/gosimple/slug"
|
"github.com/gosimple/slug"
|
||||||
|
"github.com/jackc/pgx/v4"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -27,6 +28,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
|
|||||||
log.Fatalf("ERR failed to read config: %s", err)
|
log.Fatalf("ERR failed to read config: %s", err)
|
||||||
}
|
}
|
||||||
conf.Production = false
|
conf.Production = false
|
||||||
|
conf.DefaultBlock = false
|
||||||
|
|
||||||
db, err = initDB(conf, true, false)
|
db, err = initDB(conf, true, false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -51,7 +53,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
|
|||||||
|
|
||||||
vm := goja.New()
|
vm := goja.New()
|
||||||
vm.Set("graphql", graphQLFn)
|
vm.Set("graphql", graphQLFn)
|
||||||
//vm.Set("import_csv", importCSV)
|
vm.Set("import_csv", importCSV)
|
||||||
|
|
||||||
console := vm.NewObject()
|
console := vm.NewObject()
|
||||||
console.Set("log", logFunc) //nolint: errcheck
|
console.Set("log", logFunc) //nolint: errcheck
|
||||||
@ -181,34 +183,42 @@ func (c *csvSource) Err() error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// func importCSV(table, filename string) int64 {
|
func importCSV(table, filename string) int64 {
|
||||||
// if filename[0] != '/' {
|
if filename[0] != '/' {
|
||||||
// filename = path.Join(conf.ConfigPathUsed(), filename)
|
filename = path.Join(confPath, filename)
|
||||||
// }
|
}
|
||||||
|
|
||||||
// s, err := NewCSVSource(filename)
|
s, err := NewCSVSource(filename)
|
||||||
// if err != nil {
|
if err != nil {
|
||||||
// log.Fatalf("ERR %s", err)
|
log.Fatalf("ERR %v", err)
|
||||||
// }
|
}
|
||||||
|
|
||||||
// var cols []string
|
var cols []string
|
||||||
// colval, _ := s.Values()
|
colval, _ := s.Values()
|
||||||
|
|
||||||
// for _, c := range colval {
|
for _, c := range colval {
|
||||||
// cols = append(cols, c.(string))
|
cols = append(cols, c.(string))
|
||||||
// }
|
}
|
||||||
|
|
||||||
// n, err := db.Exec(fmt.Sprintf("COPY %s FROM STDIN WITH "),
|
conn, err := acquireConn(db)
|
||||||
// cols,
|
if err != nil {
|
||||||
// s)
|
log.Fatalf("ERR %v", err)
|
||||||
|
}
|
||||||
|
//nolint: errcheck
|
||||||
|
defer releaseConn(db, conn)
|
||||||
|
|
||||||
// if err != nil {
|
n, err := conn.CopyFrom(
|
||||||
// err = fmt.Errorf("%w (line no %d)", err, s.i)
|
context.Background(),
|
||||||
// log.Fatalf("ERR %s", err)
|
pgx.Identifier{table},
|
||||||
// }
|
cols,
|
||||||
|
s)
|
||||||
|
|
||||||
// return n
|
if err != nil {
|
||||||
// }
|
log.Fatalf("ERR %v", fmt.Errorf("%w (line no %d)", err, s.i))
|
||||||
|
}
|
||||||
|
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
//nolint: errcheck
|
//nolint: errcheck
|
||||||
func logFunc(args ...interface{}) {
|
func logFunc(args ...interface{}) {
|
||||||
@ -377,11 +387,6 @@ func setFakeFuncs(f *goja.Object) {
|
|||||||
f.Set("hipster_paragraph", gofakeit.HipsterParagraph)
|
f.Set("hipster_paragraph", gofakeit.HipsterParagraph)
|
||||||
f.Set("hipster_sentence", gofakeit.HipsterSentence)
|
f.Set("hipster_sentence", gofakeit.HipsterSentence)
|
||||||
|
|
||||||
//Languages
|
|
||||||
//f.Set("language", gofakeit.Language)
|
|
||||||
//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)
|
|
||||||
//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)
|
|
||||||
|
|
||||||
// File
|
// File
|
||||||
f.Set("file_extension", gofakeit.FileExtension)
|
f.Set("file_extension", gofakeit.FileExtension)
|
||||||
f.Set("file_mine_type", gofakeit.FileMimeType)
|
f.Set("file_mine_type", gofakeit.FileMimeType)
|
||||||
@ -410,8 +415,6 @@ func setFakeFuncs(f *goja.Object) {
|
|||||||
f.Set("lexify", gofakeit.Lexify)
|
f.Set("lexify", gofakeit.Lexify)
|
||||||
f.Set("rand_string", getRandValue)
|
f.Set("rand_string", getRandValue)
|
||||||
f.Set("numerify", gofakeit.Numerify)
|
f.Set("numerify", gofakeit.Numerify)
|
||||||
|
|
||||||
//f.Set("programming_language", gofakeit.ProgrammingLanguage)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint: errcheck
|
//nolint: errcheck
|
||||||
|
@ -69,6 +69,8 @@ func newViper(configPath, configFile string) *viper.Viper {
|
|||||||
vi.SetDefault("auth_fail_block", "always")
|
vi.SetDefault("auth_fail_block", "always")
|
||||||
vi.SetDefault("seed_file", "seed.js")
|
vi.SetDefault("seed_file", "seed.js")
|
||||||
|
|
||||||
|
vi.SetDefault("default_block", true)
|
||||||
|
|
||||||
vi.SetDefault("database.type", "postgres")
|
vi.SetDefault("database.type", "postgres")
|
||||||
vi.SetDefault("database.host", "localhost")
|
vi.SetDefault("database.host", "localhost")
|
||||||
vi.SetDefault("database.port", 5432)
|
vi.SetDefault("database.port", 5432)
|
||||||
|
@ -7,8 +7,8 @@ import (
|
|||||||
|
|
||||||
var healthyResponse = []byte("All's Well")
|
var healthyResponse = []byte("All's Well")
|
||||||
|
|
||||||
func health(w http.ResponseWriter, _ *http.Request) {
|
func health(w http.ResponseWriter, r *http.Request) {
|
||||||
ct, cancel := context.WithTimeout(context.Background(), conf.DB.PingTimeout)
|
ct, cancel := context.WithTimeout(r.Context(), conf.DB.PingTimeout)
|
||||||
defer cancel()
|
defer cancel()
|
||||||
|
|
||||||
if err := db.PingContext(ct); err != nil {
|
if err := db.PingContext(ct); err != nil {
|
||||||
|
@ -10,6 +10,8 @@ import (
|
|||||||
"github.com/dosco/super-graph/core"
|
"github.com/dosco/super-graph/core"
|
||||||
"github.com/dosco/super-graph/internal/serv/internal/auth"
|
"github.com/dosco/super-graph/internal/serv/internal/auth"
|
||||||
"github.com/rs/cors"
|
"github.com/rs/cors"
|
||||||
|
"go.opencensus.io/plugin/ochttp"
|
||||||
|
"go.opencensus.io/trace"
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -44,7 +46,7 @@ func apiV1Handler() http.Handler {
|
|||||||
AllowCredentials: true,
|
AllowCredentials: true,
|
||||||
Debug: conf.DebugCORS,
|
Debug: conf.DebugCORS,
|
||||||
})
|
})
|
||||||
h = c.Handler(h)
|
return c.Handler(h)
|
||||||
}
|
}
|
||||||
|
|
||||||
return h
|
return h
|
||||||
@ -78,6 +80,22 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
|
|||||||
doLog := true
|
doLog := true
|
||||||
res, err := sg.GraphQL(ct, req.Query, req.Vars)
|
res, err := sg.GraphQL(ct, req.Query, req.Vars)
|
||||||
|
|
||||||
|
if conf.telemetryEnabled() {
|
||||||
|
span := trace.FromContext(ct)
|
||||||
|
|
||||||
|
span.AddAttributes(
|
||||||
|
trace.StringAttribute("operation", res.OperationName()),
|
||||||
|
trace.StringAttribute("query_name", res.QueryName()),
|
||||||
|
trace.StringAttribute("role", res.Role()),
|
||||||
|
)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
span.AddAttributes(trace.StringAttribute("error", err.Error()))
|
||||||
|
}
|
||||||
|
|
||||||
|
ochttp.SetRoute(ct, apiRoute)
|
||||||
|
}
|
||||||
|
|
||||||
if !conf.Production && res.QueryName() == introspectionQuery {
|
if !conf.Production && res.QueryName() == introspectionQuery {
|
||||||
doLog = false
|
doLog = false
|
||||||
}
|
}
|
||||||
|
@ -15,7 +15,6 @@ import (
|
|||||||
"contrib.go.opencensus.io/integrations/ocsql"
|
"contrib.go.opencensus.io/integrations/ocsql"
|
||||||
"github.com/jackc/pgx/v4"
|
"github.com/jackc/pgx/v4"
|
||||||
"github.com/jackc/pgx/v4/stdlib"
|
"github.com/jackc/pgx/v4/stdlib"
|
||||||
//_ "github.com/jackc/pgx/v4/stdlib"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@ -215,7 +214,20 @@ func initDB(c *Config, useDB, useTelemetry bool) (*sql.DB, error) {
|
|||||||
// }
|
// }
|
||||||
|
|
||||||
if useTelemetry && conf.telemetryEnabled() {
|
if useTelemetry && conf.telemetryEnabled() {
|
||||||
driverName, err = ocsql.Register(driverName, ocsql.WithAllTraceOptions(), ocsql.WithInstanceName(conf.AppName))
|
opts := ocsql.TraceOptions{
|
||||||
|
AllowRoot: true,
|
||||||
|
Ping: true,
|
||||||
|
RowsNext: true,
|
||||||
|
RowsClose: true,
|
||||||
|
RowsAffected: true,
|
||||||
|
LastInsertID: true,
|
||||||
|
Query: conf.Telemetry.Tracing.IncludeQuery,
|
||||||
|
QueryParams: conf.Telemetry.Tracing.IncludeParams,
|
||||||
|
}
|
||||||
|
opt := ocsql.WithOptions(opts)
|
||||||
|
name := ocsql.WithInstanceName(conf.AppName)
|
||||||
|
|
||||||
|
driverName, err = ocsql.Register(driverName, opt, name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("unable to register ocsql driver: %v", err)
|
return nil, fmt.Errorf("unable to register ocsql driver: %v", err)
|
||||||
}
|
}
|
||||||
|
File diff suppressed because one or more lines are too long
@ -13,6 +13,11 @@ import (
|
|||||||
rice "github.com/GeertJohan/go.rice"
|
rice "github.com/GeertJohan/go.rice"
|
||||||
"github.com/NYTimes/gziphandler"
|
"github.com/NYTimes/gziphandler"
|
||||||
"github.com/dosco/super-graph/internal/serv/internal/auth"
|
"github.com/dosco/super-graph/internal/serv/internal/auth"
|
||||||
|
"go.opencensus.io/plugin/ochttp"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
apiRoute string = "/api/v1/graphql"
|
||||||
)
|
)
|
||||||
|
|
||||||
func initWatcher() {
|
func initWatcher() {
|
||||||
@ -76,6 +81,10 @@ func startHTTP() {
|
|||||||
MaxHeaderBytes: 1 << 20,
|
MaxHeaderBytes: 1 << 20,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if conf.telemetryEnabled() {
|
||||||
|
srv.Handler = &ochttp.Handler{Handler: routes}
|
||||||
|
}
|
||||||
|
|
||||||
idleConnsClosed := make(chan struct{})
|
idleConnsClosed := make(chan struct{})
|
||||||
go func() {
|
go func() {
|
||||||
sigint := make(chan os.Signal, 1)
|
sigint := make(chan os.Signal, 1)
|
||||||
@ -114,8 +123,6 @@ func routeHandler() (http.Handler, error) {
|
|||||||
return mux, nil
|
return mux, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
apiRoute := "/api/v1/graphql"
|
|
||||||
|
|
||||||
if len(conf.APIPath) != 0 {
|
if len(conf.APIPath) != 0 {
|
||||||
apiRoute = path.Join("/", conf.APIPath, "/v1/graphql")
|
apiRoute = path.Join("/", conf.APIPath, "/v1/graphql")
|
||||||
}
|
}
|
||||||
@ -178,6 +185,10 @@ func setActionRoutes(routes map[string]http.Handler) error {
|
|||||||
routes[p] = fn
|
routes[p] = fn
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if conf.telemetryEnabled() {
|
||||||
|
routes[p] = ochttp.WithRouteTag(routes[p], p)
|
||||||
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
67
internal/serv/stdlib.go
Normal file
67
internal/serv/stdlib.go
Normal file
@ -0,0 +1,67 @@
|
|||||||
|
package serv
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
errors "golang.org/x/xerrors"
|
||||||
|
|
||||||
|
"github.com/jackc/pgx/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ctxKey int
|
||||||
|
|
||||||
|
var ctxKeyFakeTx ctxKey = 0
|
||||||
|
|
||||||
|
var errNotPgx = errors.New("not pgx *sql.DB")
|
||||||
|
|
||||||
|
var (
|
||||||
|
fakeTxMutex sync.Mutex
|
||||||
|
fakeTxConns map[*pgx.Conn]*sql.Tx
|
||||||
|
)
|
||||||
|
|
||||||
|
func acquireConn(db *sql.DB) (*pgx.Conn, error) {
|
||||||
|
var conn *pgx.Conn
|
||||||
|
ctx := context.WithValue(context.Background(), ctxKeyFakeTx, &conn)
|
||||||
|
tx, err := db.BeginTx(ctx, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if conn == nil {
|
||||||
|
if err := tx.Rollback(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return nil, errNotPgx
|
||||||
|
}
|
||||||
|
|
||||||
|
fakeTxMutex.Lock()
|
||||||
|
fakeTxConns[conn] = tx
|
||||||
|
fakeTxMutex.Unlock()
|
||||||
|
|
||||||
|
return conn, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func releaseConn(db *sql.DB, conn *pgx.Conn) error {
|
||||||
|
var tx *sql.Tx
|
||||||
|
var ok bool
|
||||||
|
|
||||||
|
if conn.PgConn().IsBusy() || conn.PgConn().TxStatus() != 'I' {
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
|
||||||
|
defer cancel()
|
||||||
|
conn.Close(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
fakeTxMutex.Lock()
|
||||||
|
tx, ok = fakeTxConns[conn]
|
||||||
|
if ok {
|
||||||
|
delete(fakeTxConns, conn)
|
||||||
|
fakeTxMutex.Unlock()
|
||||||
|
} else {
|
||||||
|
fakeTxMutex.Unlock()
|
||||||
|
return errors.Errorf("can't release conn that is not acquired")
|
||||||
|
}
|
||||||
|
|
||||||
|
return tx.Rollback()
|
||||||
|
}
|
@ -103,7 +103,9 @@ func enableObservability(mux *http.ServeMux) (func(), error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("ERR OpenCensus: %s: %v", conf.Telemetry.Tracing, err)
|
return nil, fmt.Errorf("ERR OpenCensus: %s: %v",
|
||||||
|
conf.Telemetry.Tracing.Exporter,
|
||||||
|
err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if tex != nil {
|
if tex != nil {
|
||||||
|
@ -5,7 +5,7 @@ steps:
|
|||||||
[
|
[
|
||||||
"build",
|
"build",
|
||||||
"--tag",
|
"--tag",
|
||||||
"gcr.io/$PROJECT_ID/{% app_name_slug %}:latest",
|
"gcr.io/$PROJECT_ID/{{- .AppNameSlug -}}:latest",
|
||||||
"--build-arg",
|
"--build-arg",
|
||||||
"GO_ENV=production",
|
"GO_ENV=production",
|
||||||
".",
|
".",
|
||||||
@ -13,7 +13,7 @@ steps:
|
|||||||
|
|
||||||
# Push new image to Google Container Registry
|
# Push new image to Google Container Registry
|
||||||
- name: "gcr.io/cloud-builders/docker"
|
- name: "gcr.io/cloud-builders/docker"
|
||||||
args: ["push", "gcr.io/$PROJECT_ID/{% app_name_slug %}:latest"]
|
args: ["push", "gcr.io/$PROJECT_ID/{{- .AppNameSlug -}}:latest"]
|
||||||
|
|
||||||
# Deploy image to Cloud Run
|
# Deploy image to Cloud Run
|
||||||
- name: "gcr.io/cloud-builders/gcloud"
|
- name: "gcr.io/cloud-builders/gcloud"
|
||||||
@ -23,15 +23,15 @@ steps:
|
|||||||
"deploy",
|
"deploy",
|
||||||
"data",
|
"data",
|
||||||
"--image",
|
"--image",
|
||||||
"gcr.io/$PROJECT_ID/{% app_name_slug %}:latest",
|
"gcr.io/$PROJECT_ID/{{- .AppNameSlug -}}:latest",
|
||||||
"--add-cloudsql-instances",
|
"--add-cloudsql-instances",
|
||||||
"$PROJECT_ID:$REGION:{% app_name_slug %}_production",
|
"$PROJECT_ID:$REGION:{{- .AppNameSlug -}}_production",
|
||||||
"--region",
|
"--region",
|
||||||
"$REGION",
|
"$REGION",
|
||||||
"--platform",
|
"--platform",
|
||||||
"managed",
|
"managed",
|
||||||
"--update-env-vars",
|
"--update-env-vars",
|
||||||
"GO_ENV=production,SG_DATABASE_HOST=/cloudsql/$PROJECT_ID:$REGION:{% app_name_slug %}_production,SECRETS_FILE=prod.secrets.yml",
|
"GO_ENV=production,SG_DATABASE_HOST=/cloudsql/$PROJECT_ID:$REGION:{{- .AppNameSlug -}}_production,SECRETS_FILE=prod.secrets.yml",
|
||||||
"--port",
|
"--port",
|
||||||
"8080",
|
"8080",
|
||||||
"--service-account",
|
"--service-account",
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
app_name: "{% app_name %} Development"
|
app_name: "{{- .AppName }} Development"
|
||||||
host_port: 0.0.0.0:8080
|
host_port: 0.0.0.0:8080
|
||||||
web_ui: true
|
web_ui: true
|
||||||
|
|
||||||
@ -82,7 +82,7 @@ cors_debug: false
|
|||||||
auth:
|
auth:
|
||||||
# Can be 'rails', 'jwt' or 'header'
|
# Can be 'rails', 'jwt' or 'header'
|
||||||
type: rails
|
type: rails
|
||||||
cookie: _{% app_name_slug %}_session
|
cookie: _{{- .AppNameSlug -}}_session
|
||||||
|
|
||||||
# Comment this out if you want to disable setting
|
# Comment this out if you want to disable setting
|
||||||
# the user_id via a header for testing.
|
# the user_id via a header for testing.
|
||||||
@ -134,7 +134,7 @@ database:
|
|||||||
type: postgres
|
type: postgres
|
||||||
host: db
|
host: db
|
||||||
port: 5432
|
port: 5432
|
||||||
dbname: {% app_name_slug %}_development
|
dbname: {{- .AppNameSlug -}}_development
|
||||||
user: postgres
|
user: postgres
|
||||||
password: postgres
|
password: postgres
|
||||||
|
|
||||||
|
@ -9,48 +9,10 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- "5432:5432"
|
- "5432:5432"
|
||||||
|
|
||||||
# Yugabyte DB
|
{{ .AppNameSlug -}}_api:
|
||||||
# yb-master:
|
|
||||||
# image: yugabytedb/yugabyte:latest
|
|
||||||
# container_name: yb-master-n1
|
|
||||||
# command: [ "/home/yugabyte/bin/yb-master",
|
|
||||||
# "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
|
|
||||||
# "--master_addresses=yb-master-n1:7100",
|
|
||||||
# "--replication_factor=1",
|
|
||||||
# "--enable_ysql=true"]
|
|
||||||
# ports:
|
|
||||||
# - "7000:7000"
|
|
||||||
# environment:
|
|
||||||
# SERVICE_7000_NAME: yb-master
|
|
||||||
|
|
||||||
# db:
|
|
||||||
# image: yugabytedb/yugabyte:latest
|
|
||||||
# container_name: yb-tserver-n1
|
|
||||||
# command: [ "/home/yugabyte/bin/yb-tserver",
|
|
||||||
# "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
|
|
||||||
# "--start_pgsql_proxy",
|
|
||||||
# "--tserver_master_addrs=yb-master-n1:7100"]
|
|
||||||
# ports:
|
|
||||||
# - "9042:9042"
|
|
||||||
# - "6379:6379"
|
|
||||||
# - "5433:5433"
|
|
||||||
# - "9000:9000"
|
|
||||||
# environment:
|
|
||||||
# SERVICE_5433_NAME: ysql
|
|
||||||
# SERVICE_9042_NAME: ycql
|
|
||||||
# SERVICE_6379_NAME: yedis
|
|
||||||
# SERVICE_9000_NAME: yb-tserver
|
|
||||||
# depends_on:
|
|
||||||
# - yb-master
|
|
||||||
|
|
||||||
{% app_name_slug %}_api:
|
|
||||||
image: dosco/super-graph:latest
|
image: dosco/super-graph:latest
|
||||||
environment:
|
environment:
|
||||||
GO_ENV: "development"
|
GO_ENV: "development"
|
||||||
# Uncomment below for Yugabyte DB
|
|
||||||
# SG_DATABASE_PORT: 5433
|
|
||||||
# SG_DATABASE_USER: yugabyte
|
|
||||||
# SG_DATABASE_PASSWORD: yugabyte
|
|
||||||
volumes:
|
volumes:
|
||||||
- ./config:/config
|
- ./config:/config
|
||||||
ports:
|
ports:
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
# so I only need to overwrite some values
|
# so I only need to overwrite some values
|
||||||
inherits: dev
|
inherits: dev
|
||||||
|
|
||||||
app_name: "{% app_name %} Production"
|
app_name: "{{- .AppName }} Production"
|
||||||
host_port: 0.0.0.0:8080
|
host_port: 0.0.0.0:8080
|
||||||
web_ui: false
|
web_ui: false
|
||||||
|
|
||||||
@ -82,7 +82,7 @@ database:
|
|||||||
type: postgres
|
type: postgres
|
||||||
host: db
|
host: db
|
||||||
port: 5432
|
port: 5432
|
||||||
dbname: {% app_name_slug %}_production
|
dbname: {{- .AppNameSlug -}}_production
|
||||||
user: postgres
|
user: postgres
|
||||||
password: postgres
|
password: postgres
|
||||||
#pool_size: 10
|
#pool_size: 10
|
||||||
|
Reference in New Issue
Block a user